chore: checkpoint before Python removal

This commit is contained in:
2026-03-26 22:33:59 +00:00
parent 683cec9307
commit e568ddf82a
29972 changed files with 11269302 additions and 2 deletions

162
vendor/time-macros/src/date.rs vendored Normal file
View File

@@ -0,0 +1,162 @@
use std::iter::Peekable;
use num_conv::Truncate;
use proc_macro::token_stream;
use time_core::util::{days_in_year, weeks_in_year};
use crate::Error;
use crate::helpers::{consume_number, consume_punct, days_in_year_month, ymd_to_yo, ywd_to_yo};
use crate::to_tokens::ToTokenStream;
// Largest supported year magnitude; widened when `large-dates` is enabled.
#[cfg(feature = "large-dates")]
const MAX_YEAR: i32 = 999_999;
#[cfg(not(feature = "large-dates"))]
const MAX_YEAR: i32 = 9_999;

/// A date parsed from the macro input, normalized to year + day-of-year.
pub(crate) struct Date {
    // Calendar year; may be negative, bounded in magnitude by `MAX_YEAR`.
    pub(crate) year: i32,
    // One-indexed day of the year.
    pub(crate) ordinal: u16,
}
/// Parse a date from the macro's token stream.
///
/// Accepted forms are `year-month-day`, `year-ordinal`, and `year-Wweek-day`
/// (ISO week date). Every component is range-checked, and errors carry spans
/// pointing at the offending tokens.
pub(crate) fn parse(chars: &mut Peekable<token_stream::IntoIter>) -> Result<Date, Error> {
    // Optional explicit sign; years of five or more digits require one.
    let (year_sign_span, year_sign, explicit_sign) = if let Ok(span) = consume_punct('-', chars) {
        (Some(span), -1, true)
    } else if let Ok(span) = consume_punct('+', chars) {
        (Some(span), 1, true)
    } else {
        (None, 1, false)
    };
    let (year_span, mut year) = consume_number::<i32>("year", chars)?;
    year *= year_sign;
    if year.abs() > MAX_YEAR {
        return Err(Error::InvalidComponent {
            name: "year",
            value: year.to_string(),
            // Span from the sign (if present) through the digits.
            span_start: Some(year_sign_span.unwrap_or_else(|| year_span.start())),
            span_end: Some(year_span.end()),
        });
    }
    if !explicit_sign && year.abs() >= 10_000 {
        return Err(Error::Custom {
            message: "years with more than four digits must have an explicit sign".into(),
            span_start: Some(year_sign_span.unwrap_or_else(|| year_span.start())),
            span_end: Some(year_span.end()),
        });
    }
    consume_punct('-', chars)?;
    // year-week-day
    if let Some(proc_macro::TokenTree::Ident(ident)) = chars.peek()
        && let s = ident.to_string()
        && s.starts_with('W')
    {
        let w_span = ident.span();
        drop(chars.next()); // consume 'W' and possibly the week number
        let (week_span, week, day_span, day);
        if s.len() == 1 {
            // Bare `W`: the week number arrives as a separate token.
            (week_span, week) = consume_number::<u8>("week", chars)?;
            consume_punct('-', chars)?;
            (day_span, day) = consume_number::<u8>("day", chars)?;
        } else {
            // `W` fused with digits into a single identifier (e.g. `W01`).
            let presumptive_week = &s[1..];
            if presumptive_week.bytes().all(|d| d.is_ascii_digit())
                && let Ok(week_number) = presumptive_week.replace('_', "").parse()
            {
                (week_span, week) = (w_span, week_number);
                consume_punct('-', chars)?;
                (day_span, day) = consume_number::<u8>("day", chars)?;
            } else {
                return Err(Error::InvalidComponent {
                    name: "week",
                    value: presumptive_week.to_string(),
                    span_start: Some(w_span.start()),
                    span_end: Some(w_span.end()),
                });
            }
        };
        if week > weeks_in_year(year) {
            return Err(Error::InvalidComponent {
                name: "week",
                value: week.to_string(),
                span_start: Some(w_span.start()),
                span_end: Some(week_span.end()),
            });
        }
        if day == 0 || day > 7 {
            return Err(Error::InvalidComponent {
                name: "day",
                value: day.to_string(),
                span_start: Some(day_span.start()),
                span_end: Some(day_span.end()),
            });
        }
        // Normalize the ISO week date to (year, ordinal) for storage.
        let (year, ordinal) = ywd_to_yo(year, week, day);
        return Ok(Date { year, ordinal });
    }
    // We don't yet know whether it's year-month-day or year-ordinal.
    let (month_or_ordinal_span, month_or_ordinal) =
        consume_number::<u16>("month or ordinal", chars)?;
    // year-month-day
    if consume_punct('-', chars).is_ok() {
        let (month_span, month) = (month_or_ordinal_span, month_or_ordinal);
        let (day_span, day) = consume_number::<u8>("day", chars)?;
        if month == 0 || month > 12 {
            return Err(Error::InvalidComponent {
                name: "month",
                value: month.to_string(),
                span_start: Some(month_span.start()),
                span_end: Some(month_span.end()),
            });
        }
        // The range check above guarantees the value fits in the target type.
        let month = month.truncate();
        if day == 0 || day > days_in_year_month(year, month) {
            return Err(Error::InvalidComponent {
                name: "day",
                value: day.to_string(),
                span_start: Some(day_span.start()),
                span_end: Some(day_span.end()),
            });
        }
        let (year, ordinal) = ymd_to_yo(year, month, day);
        Ok(Date { year, ordinal })
    }
    // year-ordinal
    else {
        let (ordinal_span, ordinal) = (month_or_ordinal_span, month_or_ordinal);
        if ordinal == 0 || ordinal > days_in_year(year) {
            return Err(Error::InvalidComponent {
                name: "ordinal",
                value: ordinal.to_string(),
                span_start: Some(ordinal_span.start()),
                span_end: Some(ordinal_span.end()),
            });
        }
        Ok(Date { year, ordinal })
    }
}
impl ToTokenStream for Date {
    // Emit the unchecked constructor call; `parse` only ever produces a valid
    // (year, ordinal) pair, which is the invariant the call requires.
    fn append_to(self, ts: &mut proc_macro::TokenStream) {
        quote_append! { ts
            unsafe {
                ::time::Date::__from_ordinal_date_unchecked(
                    #(self.year),
                    #(self.ordinal),
                )
            }
        }
    }
}

50
vendor/time-macros/src/datetime.rs vendored Normal file
View File

@@ -0,0 +1,50 @@
use std::iter::Peekable;
use proc_macro::{TokenStream, token_stream};
use crate::date::Date;
use crate::error::Error;
use crate::offset::Offset;
use crate::time::Time;
use crate::to_tokens::ToTokenStream;
use crate::{date, offset, time};
/// A parsed `datetime!` invocation: date, time, and an optional offset.
pub(crate) struct DateTime {
    date: Date,
    time: Time,
    // `None` expands to a `PrimitiveDateTime`; `Some` adds `.assume_offset(..)`.
    offset: Option<Offset>,
}
pub(crate) fn parse(chars: &mut Peekable<token_stream::IntoIter>) -> Result<DateTime, Error> {
let date = date::parse(chars)?;
let time = time::parse(chars)?;
let offset = match offset::parse(chars) {
Ok(offset) => Some(offset),
Err(Error::UnexpectedEndOfInput | Error::MissingComponent { name: "sign", .. }) => None,
Err(err) => return Err(err),
};
if let Some(token) = chars.peek() {
return Err(Error::UnexpectedToken {
tree: token.clone(),
});
}
Ok(DateTime { date, time, offset })
}
impl ToTokenStream for DateTime {
    // Emit `::time::PrimitiveDateTime::new(date, time)`, followed by
    // `.assume_offset(offset)` when an offset was parsed.
    fn append_to(self, ts: &mut TokenStream) {
        let maybe_offset = match self.offset {
            Some(offset) => quote_! { .assume_offset(#S(offset)) },
            None => quote_! {},
        };
        quote_append! { ts
            ::time::PrimitiveDateTime::new(
                #S(self.date),
                #S(self.time),
            ) #S(maybe_offset)
        }
    }
}

123
vendor/time-macros/src/error.rs vendored Normal file
View File

@@ -0,0 +1,123 @@
use std::borrow::Cow;
use std::fmt;
use proc_macro::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
/// Builder-style helper for setting the span on a token.
trait WithSpan {
    fn with_span(self, span: Span) -> Self;
}

impl WithSpan for TokenTree {
    fn with_span(mut self, span: Span) -> Self {
        self.set_span(span);
        self
    }
}
/// Any error that can occur while parsing macro input.
pub(crate) enum Error {
    /// A required component (identified by `name`) was absent.
    MissingComponent {
        name: &'static str,
        span_start: Option<Span>,
        span_end: Option<Span>,
    },
    /// A component was present but its value was malformed or out of range.
    InvalidComponent {
        name: &'static str,
        value: String,
        span_start: Option<Span>,
        span_end: Option<Span>,
    },
    /// A string literal was expected but something else was found.
    #[cfg(any(feature = "formatting", feature = "parsing"))]
    ExpectedString {
        span_start: Option<Span>,
        span_end: Option<Span>,
    },
    /// Input continued past the point where it should have ended.
    UnexpectedToken {
        tree: TokenTree,
    },
    /// Input ended before parsing was complete.
    UnexpectedEndOfInput,
    /// A free-form error carrying its own message.
    Custom {
        message: Cow<'static, str>,
        span_start: Option<Span>,
        span_end: Option<Span>,
    },
}
// Human-readable message; this text becomes the `compile_error!` argument.
impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::MissingComponent { name, .. } => write!(f, "missing component: {name}"),
            Self::InvalidComponent { name, value, .. } => {
                write!(f, "invalid component: {name} was {value}")
            }
            #[cfg(any(feature = "formatting", feature = "parsing"))]
            Self::ExpectedString { .. } => f.write_str("expected string literal"),
            Self::UnexpectedToken { tree } => write!(f, "unexpected token: {tree}"),
            Self::UnexpectedEndOfInput => f.write_str("unexpected end of input"),
            Self::Custom { message, .. } => f.write_str(message),
        }
    }
}
impl Error {
    /// Span at which the diagnostic should begin; falls back to
    /// `Span::mixed_site()` when no span was recorded.
    fn span_start(&self) -> Span {
        match self {
            Self::MissingComponent { span_start, .. }
            | Self::InvalidComponent { span_start, .. }
            | Self::Custom { span_start, .. } => *span_start,
            #[cfg(any(feature = "formatting", feature = "parsing"))]
            Self::ExpectedString { span_start, .. } => *span_start,
            Self::UnexpectedToken { tree } => Some(tree.span()),
            Self::UnexpectedEndOfInput => Some(Span::mixed_site()),
        }
        .unwrap_or_else(Span::mixed_site)
    }

    /// Span at which the diagnostic should end; defaults to `span_start()`.
    fn span_end(&self) -> Span {
        match self {
            Self::MissingComponent { span_end, .. }
            | Self::InvalidComponent { span_end, .. }
            | Self::Custom { span_end, .. } => *span_end,
            #[cfg(any(feature = "formatting", feature = "parsing"))]
            Self::ExpectedString { span_end, .. } => *span_end,
            Self::UnexpectedToken { tree, .. } => Some(tree.span()),
            Self::UnexpectedEndOfInput => Some(Span::mixed_site()),
        }
        .unwrap_or_else(|| self.span_start())
    }

    /// Render the error as a `::core::compile_error!("…")` invocation so the
    /// macro expansion fails with this message at the recorded spans.
    pub(crate) fn to_compile_error(&self) -> TokenStream {
        let (start, end) = (self.span_start(), self.span_end());
        [
            TokenTree::from(Punct::new(':', Spacing::Joint)).with_span(start),
            TokenTree::from(Punct::new(':', Spacing::Alone)).with_span(start),
            TokenTree::from(Ident::new("core", start)),
            TokenTree::from(Punct::new(':', Spacing::Joint)).with_span(start),
            TokenTree::from(Punct::new(':', Spacing::Alone)).with_span(start),
            TokenTree::from(Ident::new("compile_error", start)),
            TokenTree::from(Punct::new('!', Spacing::Alone)).with_span(start),
            TokenTree::from(Group::new(
                Delimiter::Parenthesis,
                TokenStream::from(
                    TokenTree::from(Literal::string(&self.to_string())).with_span(end),
                ),
            ))
            .with_span(end),
        ]
        .iter()
        .cloned()
        .collect()
    }

    /// Like `to_compile_error`, but for use in macros that produce items.
    #[cfg(all(feature = "serde", any(feature = "formatting", feature = "parsing")))]
    pub(crate) fn to_compile_error_standalone(&self) -> TokenStream {
        // The trailing semicolon makes the expression valid in item position.
        let end = self.span_end();
        self.to_compile_error()
            .into_iter()
            .chain(std::iter::once(
                TokenTree::from(Punct::new(';', Spacing::Alone)).with_span(end),
            ))
            .collect()
    }
}

View File

@@ -0,0 +1,252 @@
use std::iter;
use super::{Error, Location, Spanned, SpannedValue, Unused, lexer, unused};
/// A single node of the parsed format description.
///
/// `Unused`-wrapped fields record source positions that are kept only so the
/// grammar is fully represented; they are never read afterwards.
pub(super) enum Item<'a> {
    /// Verbatim text outside any brackets.
    Literal(Spanned<&'a [u8]>),
    /// `[[` — version 1's escape for a literal `[`.
    EscapedBracket {
        _first: Unused<Location>,
        _second: Unused<Location>,
    },
    /// `[name key:value …]` — a single format component.
    Component {
        _opening_bracket: Unused<Location>,
        _leading_whitespace: Unused<Option<Spanned<&'a [u8]>>>,
        name: Spanned<&'a [u8]>,
        modifiers: Box<[Modifier<'a>]>,
        _trailing_whitespace: Unused<Option<Spanned<&'a [u8]>>>,
        _closing_bracket: Unused<Location>,
    },
    /// `[optional [...]]` — a nested description that may be absent.
    Optional {
        opening_bracket: Location,
        _leading_whitespace: Unused<Option<Spanned<&'a [u8]>>>,
        _optional_kw: Unused<Spanned<&'a [u8]>>,
        _whitespace: Unused<Spanned<&'a [u8]>>,
        nested_format_description: NestedFormatDescription<'a>,
        closing_bracket: Location,
    },
    /// `[first [...] [...]]` — a list of alternative nested descriptions.
    First {
        opening_bracket: Location,
        _leading_whitespace: Unused<Option<Spanned<&'a [u8]>>>,
        _first_kw: Unused<Spanned<&'a [u8]>>,
        _whitespace: Unused<Spanned<&'a [u8]>>,
        nested_format_descriptions: Box<[NestedFormatDescription<'a>]>,
        closing_bracket: Location,
    },
}

/// A bracketed format description nested inside `optional` or `first`.
pub(super) struct NestedFormatDescription<'a> {
    pub(super) _opening_bracket: Unused<Location>,
    pub(super) items: Box<[Item<'a>]>,
    pub(super) _closing_bracket: Unused<Location>,
    pub(super) _trailing_whitespace: Unused<Option<Spanned<&'a [u8]>>>,
}

/// A `key:value` modifier attached to a component.
pub(super) struct Modifier<'a> {
    pub(super) _leading_whitespace: Unused<Spanned<&'a [u8]>>,
    pub(super) key: Spanned<&'a [u8]>,
    pub(super) _colon: Unused<Location>,
    pub(super) value: Spanned<&'a [u8]>,
}
/// Entry point: parse a lexed token stream into a stream of AST items.
pub(super) fn parse<
    'item: 'iter,
    'iter,
    I: Iterator<Item = Result<lexer::Token<'item>, Error>>,
    const VERSION: u8,
>(
    tokens: &'iter mut lexer::Lexed<I>,
) -> impl Iterator<Item = Result<Item<'item>, Error>> + use<'item, 'iter, I, VERSION> {
    // Only format description versions 1 and 2 exist.
    assert!(version!(1..=2));
    parse_inner::<_, false, VERSION>(tokens)
}
/// Parse items until the stream ends or, when `NESTED`, until a closing
/// bracket is peeked (which is left for the caller to consume).
fn parse_inner<
    'item,
    I: Iterator<Item = Result<lexer::Token<'item>, Error>>,
    const NESTED: bool,
    const VERSION: u8,
>(
    tokens: &mut lexer::Lexed<I>,
) -> impl Iterator<Item = Result<Item<'item>, Error>> + use<'_, 'item, I, NESTED, VERSION> {
    iter::from_fn(move || {
        // A closing bracket terminates a nested description; don't consume it.
        if NESTED && tokens.peek_closing_bracket().is_some() {
            return None;
        }
        let next = match tokens.next()? {
            Ok(token) => token,
            Err(err) => return Some(Err(err)),
        };
        Some(match next {
            // The lexer only emits `Literal` at bracket depth zero, so one
            // inside a nested description would be a lexer bug.
            lexer::Token::Literal(Spanned { value: _, span: _ }) if NESTED => {
                bug!("literal should not be present in nested description")
            }
            lexer::Token::Literal(value) => Ok(Item::Literal(value)),
            lexer::Token::Bracket {
                kind: lexer::BracketKind::Opening,
                location,
            } => {
                if version!(..=1) {
                    // Version 1: a second `[` makes this an escaped bracket.
                    if let Some(second_location) = tokens.next_if_opening_bracket() {
                        Ok(Item::EscapedBracket {
                            _first: unused(location),
                            _second: unused(second_location),
                        })
                    } else {
                        parse_component::<_, VERSION>(location, tokens)
                    }
                } else {
                    parse_component::<_, VERSION>(location, tokens)
                }
            }
            lexer::Token::Bracket {
                kind: lexer::BracketKind::Closing,
                location: _,
            } if NESTED => {
                bug!("closing bracket should be caught by the `if` statement")
            }
            lexer::Token::Bracket {
                kind: lexer::BracketKind::Closing,
                location: _,
            } => {
                bug!("closing bracket should have been consumed by `parse_component`")
            }
            // Inside `optional`/`first`, bare component parts act as literals.
            lexer::Token::ComponentPart { kind: _, value } if NESTED => Ok(Item::Literal(value)),
            lexer::Token::ComponentPart { kind: _, value: _ } => {
                bug!("component part should have been consumed by `parse_component`")
            }
        })
    })
}
/// Parse the interior of a bracketed item, starting just after the opening
/// bracket. Recognizes the `optional` and `first` keywords as well as plain
/// components with `key:value` modifiers.
fn parse_component<'a, I: Iterator<Item = Result<lexer::Token<'a>, Error>>, const VERSION: u8>(
    opening_bracket: Location,
    tokens: &mut lexer::Lexed<I>,
) -> Result<Item<'a>, Error> {
    let leading_whitespace = tokens.next_if_whitespace();
    let Some(name) = tokens.next_if_not_whitespace() else {
        // Point at the whitespace if present, otherwise at the bracket itself.
        let span = match leading_whitespace {
            Some(Spanned { value: _, span }) => span,
            None => opening_bracket.to(opening_bracket),
        };
        return Err(span.error("expected component name"));
    };
    if *name == b"optional" {
        let Some(whitespace) = tokens.next_if_whitespace() else {
            return Err(name.span.error("expected whitespace after `optional`"));
        };
        let nested = parse_nested::<_, VERSION>(whitespace.span.end, tokens)?;
        let Some(closing_bracket) = tokens.next_if_closing_bracket() else {
            return Err(opening_bracket.error("unclosed bracket"));
        };
        return Ok(Item::Optional {
            opening_bracket,
            _leading_whitespace: unused(leading_whitespace),
            _optional_kw: unused(name),
            _whitespace: unused(whitespace),
            nested_format_description: nested,
            closing_bracket,
        });
    }
    if *name == b"first" {
        let Some(whitespace) = tokens.next_if_whitespace() else {
            return Err(name.span.error("expected whitespace after `first`"));
        };
        // Collect nested descriptions until one fails to parse — which is the
        // expected outcome once the closing bracket of `first` is reached.
        let mut nested_format_descriptions = Vec::new();
        while let Ok(description) = parse_nested::<_, VERSION>(whitespace.span.end, tokens) {
            nested_format_descriptions.push(description);
        }
        let Some(closing_bracket) = tokens.next_if_closing_bracket() else {
            return Err(opening_bracket.error("unclosed bracket"));
        };
        return Ok(Item::First {
            opening_bracket,
            _leading_whitespace: unused(leading_whitespace),
            _first_kw: unused(name),
            _whitespace: unused(whitespace),
            nested_format_descriptions: nested_format_descriptions.into_boxed_slice(),
            closing_bracket,
        });
    }
    // Ordinary component: gather whitespace-separated `key:value` modifiers
    // until the closing bracket.
    let mut modifiers = Vec::new();
    let trailing_whitespace = loop {
        let Some(whitespace) = tokens.next_if_whitespace() else {
            break None;
        };
        if let Some(location) = tokens.next_if_opening_bracket() {
            return Err(location
                .to(location)
                .error("modifier must be of the form `key:value`"));
        }
        let Some(Spanned { value, span }) = tokens.next_if_not_whitespace() else {
            // Whitespace directly before the closing bracket: record and stop.
            break Some(whitespace);
        };
        let Some(colon_index) = value.iter().position(|&b| b == b':') else {
            return Err(span.error("modifier must be of the form `key:value`"));
        };
        let key = &value[..colon_index];
        let value = &value[colon_index + 1..];
        if key.is_empty() {
            return Err(span.shrink_to_start().error("expected modifier key"));
        }
        if value.is_empty() {
            return Err(span.shrink_to_end().error("expected modifier value"));
        }
        modifiers.push(Modifier {
            _leading_whitespace: unused(whitespace),
            // Split the span at the colon so key and value each get their own.
            key: key.spanned(span.shrink_to_before(colon_index as u32)),
            _colon: unused(span.start.offset(colon_index as u32)),
            value: value.spanned(span.shrink_to_after(colon_index as u32)),
        });
    };
    let Some(closing_bracket) = tokens.next_if_closing_bracket() else {
        return Err(opening_bracket.error("unclosed bracket"));
    };
    Ok(Item::Component {
        _opening_bracket: unused(opening_bracket),
        _leading_whitespace: unused(leading_whitespace),
        name,
        modifiers: modifiers.into_boxed_slice(),
        _trailing_whitespace: unused(trailing_whitespace),
        _closing_bracket: unused(closing_bracket),
    })
}
/// Parse a bracketed nested format description (used by `optional`/`first`),
/// including any trailing whitespace after its closing bracket.
fn parse_nested<'a, I: Iterator<Item = Result<lexer::Token<'a>, Error>>, const VERSION: u8>(
    last_location: Location,
    tokens: &mut lexer::Lexed<I>,
) -> Result<NestedFormatDescription<'a>, Error> {
    let Some(opening_bracket) = tokens.next_if_opening_bracket() else {
        return Err(last_location.error("expected opening bracket"));
    };
    // `parse_inner` stops at (but does not consume) the closing bracket.
    let items = parse_inner::<_, true, VERSION>(tokens).collect::<Result<_, _>>()?;
    let Some(closing_bracket) = tokens.next_if_closing_bracket() else {
        return Err(opening_bracket.error("unclosed bracket"));
    };
    let trailing_whitespace = tokens.next_if_whitespace();
    Ok(NestedFormatDescription {
        _opening_bracket: unused(opening_bracket),
        items,
        _closing_bracket: unused(closing_bracket),
        _trailing_whitespace: unused(trailing_whitespace),
    })
}

View File

@@ -0,0 +1,454 @@
use std::num::NonZero;
use std::str::{self, FromStr};
use super::{Error, Span, Spanned, Unused, ast, unused};
/// Lower each AST item into a format item, passing errors through untouched.
pub(super) fn parse<'a>(
    ast_items: impl Iterator<Item = Result<ast::Item<'a>, Error>>,
) -> impl Iterator<Item = Result<Item<'a>, Error>> {
    ast_items.map(|ast_item| match ast_item {
        Ok(item) => Item::from_ast(item),
        Err(err) => Err(err),
    })
}
/// A format item with its component and modifiers resolved, ready for
/// conversion into the public `OwnedFormatItem` type.
pub(super) enum Item<'a> {
    /// Verbatim bytes copied to the output.
    Literal(&'a [u8]),
    /// A resolved component with its modifiers applied.
    Component(Component),
    /// Items that may be absent when parsing.
    Optional {
        value: Box<[Self]>,
        _span: Unused<Span>,
    },
    /// Alternative item sequences; stored as a list of lists.
    First {
        value: Box<[Box<[Self]>]>,
        _span: Unused<Span>,
    },
}
impl Item<'_> {
    /// Lower an AST item, resolving component names and modifiers; nested
    /// descriptions are lowered recursively.
    pub(super) fn from_ast(ast_item: ast::Item<'_>) -> Result<Item<'_>, Error> {
        Ok(match ast_item {
            ast::Item::Component {
                _opening_bracket: _,
                _leading_whitespace: _,
                name,
                modifiers,
                _trailing_whitespace: _,
                _closing_bracket: _,
            } => Item::Component(component_from_ast(&name, &modifiers)?),
            ast::Item::Literal(Spanned { value, span: _ }) => Item::Literal(value),
            // `[[` denotes a single literal opening bracket.
            ast::Item::EscapedBracket {
                _first: _,
                _second: _,
            } => Item::Literal(b"["),
            ast::Item::Optional {
                opening_bracket,
                _leading_whitespace: _,
                _optional_kw: _,
                _whitespace: _,
                nested_format_description,
                closing_bracket,
            } => {
                let items = nested_format_description
                    .items
                    .into_vec()
                    .into_iter()
                    .map(Item::from_ast)
                    .collect::<Result<_, _>>()?;
                Item::Optional {
                    value: items,
                    _span: unused(opening_bracket.to(closing_bracket)),
                }
            }
            ast::Item::First {
                opening_bracket,
                _leading_whitespace: _,
                _first_kw: _,
                _whitespace: _,
                nested_format_descriptions,
                closing_bracket,
            } => {
                // Lower each alternative; the first error aborts the whole item.
                let items = nested_format_descriptions
                    .into_vec()
                    .into_iter()
                    .map(|nested_format_description| {
                        nested_format_description
                            .items
                            .into_vec()
                            .into_iter()
                            .map(Item::from_ast)
                            .collect()
                    })
                    .collect::<Result<_, _>>()?;
                Item::First {
                    value: items,
                    _span: unused(opening_bracket.to(closing_bracket)),
                }
            }
        })
    }
}
// Convert a single lowered item into the crate's public representation.
impl From<Item<'_>> for crate::format_description::public::OwnedFormatItem {
    fn from(item: Item<'_>) -> Self {
        match item {
            Item::Literal(literal) => Self::Literal(literal.to_vec().into_boxed_slice()),
            Item::Component(component) => Self::Component(component.into()),
            Item::Optional { value, _span: _ } => Self::Optional(Box::new(value.into())),
            Item::First { value, _span: _ } => {
                Self::First(value.into_vec().into_iter().map(Into::into).collect())
            }
        }
    }
}

impl<'a> From<Box<[Item<'a>]>> for crate::format_description::public::OwnedFormatItem {
    fn from(items: Box<[Item<'a>]>) -> Self {
        let items = items.into_vec();
        // A single item stays bare; multiple items become a `Compound`.
        match <[_; 1]>::try_from(items) {
            Ok([item]) => item.into(),
            Err(vec) => Self::Compound(vec.into_iter().map(Into::into).collect()),
        }
    }
}
/// Generate the `Component` enum, one struct per component, modifier parsing
/// (`with_modifiers`), conversion into the public `Component` type, and the
/// name-based dispatch function `component_from_ast`.
macro_rules! component_definition {
    // Conditional-expansion helpers keyed on the optional `#[required]` and
    // `#[from_str]` markers in the component table.
    (@if_required required then { $($then:tt)* } $(else { $($else:tt)* })?) => { $($then)* };
    (@if_required then { $($then:tt)* } $(else { $($else:tt)* })?) => { $($($else)*)? };
    (@if_from_str from_str then { $($then:tt)* } $(else { $($else:tt)* })?) => { $($then)* };
    (@if_from_str then { $($then:tt)* } $(else { $($else:tt)* })?) => { $($($else)*)? };
    ($vis:vis enum $name:ident {
        $($variant:ident = $parse_variant:literal {$(
            $(#[$required:tt])?
            $field:ident = $parse_field:literal:
            Option<$(#[$from_str:tt])? $field_type:ty>
            => $target_field:ident
        ),* $(,)?}),* $(,)?
    }) => {
        $vis enum $name {
            $($variant($variant),)*
        }
        $($vis struct $variant {
            $($field: Option<$field_type>),*
        })*
        $(impl $variant {
            // Apply each parsed `key:value` modifier to an all-`None` struct,
            // rejecting unknown keys and missing `#[required]` modifiers.
            fn with_modifiers(
                modifiers: &[ast::Modifier<'_>],
                _component_span: Span,
            ) -> Result<Self, Error>
            {
                #[allow(unused_mut)]
                let mut this = Self {
                    $($field: None),*
                };
                for modifier in modifiers {
                    $(#[allow(clippy::string_lit_as_bytes)]
                    if modifier.key.eq_ignore_ascii_case($parse_field.as_bytes()) {
                        this.$field = component_definition!(@if_from_str $($from_str)?
                            then {
                                parse_from_modifier_value::<$field_type>(&modifier.value)?
                            } else {
                                <$field_type>::from_modifier_value(&modifier.value)?
                            });
                        continue;
                    })*
                    // No key matched this modifier.
                    return Err(modifier.key.span.error("invalid modifier key"));
                }
                $(component_definition! { @if_required $($required)? then {
                    if this.$field.is_none() {
                        return Err(_component_span.error("missing required modifier"));
                    }
                }})*
                Ok(this)
            }
        })*
        impl From<$name> for crate::format_description::public::Component {
            fn from(component: $name) -> Self {
                match component {$(
                    $name::$variant($variant { $($field),* }) => {
                        $crate::format_description::public::Component::$variant(
                            super::public::modifier::$variant {$(
                                $target_field: component_definition! { @if_required $($required)?
                                    then {
                                        match $field {
                                            Some(value) => value.into(),
                                            None => bug!("required modifier was not set"),
                                        }
                                    } else {
                                        $field.unwrap_or_default().into()
                                    }
                                }
                            ),*}
                        )
                    }
                )*}
            }
        }
        // Resolve a component by case-insensitive name, then parse modifiers.
        fn component_from_ast(
            name: &Spanned<&[u8]>,
            modifiers: &[ast::Modifier<'_>],
        ) -> Result<Component, Error> {
            $(#[allow(clippy::string_lit_as_bytes)]
            if name.eq_ignore_ascii_case($parse_variant.as_bytes()) {
                return Ok(Component::$variant($variant::with_modifiers(&modifiers, name.span)?));
            })*
            Err(name.span.error("invalid component"))
        }
    }
}
// The full table of supported components, their parse names, and their
// modifiers. `#[required]` marks a modifier that must be supplied;
// `#[from_str]` values are parsed via `FromStr` rather than a modifier enum.
component_definition! {
    pub(super) enum Component {
        Day = "day" {
            padding = "padding": Option<Padding> => padding,
        },
        End = "end" {
            trailing_input = "trailing_input": Option<TrailingInput> => trailing_input,
        },
        Hour = "hour" {
            padding = "padding": Option<Padding> => padding,
            base = "repr": Option<HourBase> => is_12_hour_clock,
        },
        Ignore = "ignore" {
            #[required]
            count = "count": Option<#[from_str] NonZero<u16>> => count,
        },
        Minute = "minute" {
            padding = "padding": Option<Padding> => padding,
        },
        Month = "month" {
            padding = "padding": Option<Padding> => padding,
            repr = "repr": Option<MonthRepr> => repr,
            case_sensitive = "case_sensitive": Option<MonthCaseSensitive> => case_sensitive,
        },
        OffsetHour = "offset_hour" {
            sign_behavior = "sign": Option<SignBehavior> => sign_is_mandatory,
            padding = "padding": Option<Padding> => padding,
        },
        OffsetMinute = "offset_minute" {
            padding = "padding": Option<Padding> => padding,
        },
        OffsetSecond = "offset_second" {
            padding = "padding": Option<Padding> => padding,
        },
        Ordinal = "ordinal" {
            padding = "padding": Option<Padding> => padding,
        },
        Period = "period" {
            case = "case": Option<PeriodCase> => is_uppercase,
            case_sensitive = "case_sensitive": Option<PeriodCaseSensitive> => case_sensitive,
        },
        Second = "second" {
            padding = "padding": Option<Padding> => padding,
        },
        Subsecond = "subsecond" {
            digits = "digits": Option<SubsecondDigits> => digits,
        },
        UnixTimestamp = "unix_timestamp" {
            precision = "precision": Option<UnixTimestampPrecision> => precision,
            sign_behavior = "sign": Option<SignBehavior> => sign_is_mandatory,
        },
        Weekday = "weekday" {
            repr = "repr": Option<WeekdayRepr> => repr,
            one_indexed = "one_indexed": Option<WeekdayOneIndexed> => one_indexed,
            case_sensitive = "case_sensitive": Option<WeekdayCaseSensitive> => case_sensitive,
        },
        WeekNumber = "week_number" {
            padding = "padding": Option<Padding> => padding,
            repr = "repr": Option<WeekNumberRepr> => repr,
        },
        Year = "year" {
            padding = "padding": Option<Padding> => padding,
            repr = "repr": Option<YearRepr> => repr,
            range = "range": Option<YearRange> => range,
            base = "base": Option<YearBase> => iso_week_based,
            sign_behavior = "sign": Option<SignBehavior> => sign_is_mandatory,
        },
    }
}
/// Resolve the conversion target type for a modifier enum: the explicit
/// override when given, otherwise the public modifier type of the same name.
macro_rules! target_ty {
    ($name:ident $type:ty) => {
        $type
    };
    ($name:ident) => {
        super::public::modifier::$name
    };
}

/// Get the target value for a given enum.
macro_rules! target_value {
    ($name:ident $variant:ident $value:expr) => {
        $value
    };
    ($name:ident $variant:ident) => {
        super::public::modifier::$name::$variant
    };
}
/// Generate a modifier-value enum along with `from_modifier_value` parsing
/// and a `From` conversion into the corresponding public/target type.
macro_rules! modifier {
    ($(
        enum $name:ident $(($target_ty:ty))? {
            $(
                $(#[$attr:meta])?
                $variant:ident $(($target_value:expr))? = $parse_variant:literal
            ),* $(,)?
        }
    )+) => {$(
        #[derive(Default)]
        enum $name {
            $($(#[$attr])? $variant),*
        }
        impl $name {
            /// Parse the modifier from its string representation.
            fn from_modifier_value(value: &Spanned<&[u8]>) -> Result<Option<Self>, Error> {
                $(if value.eq_ignore_ascii_case($parse_variant) {
                    return Ok(Some(Self::$variant));
                })*
                Err(value.span.error("invalid modifier value"))
            }
        }
        impl From<$name> for target_ty!($name $($target_ty)?) {
            fn from(modifier: $name) -> Self {
                match modifier {
                    $($name::$variant => target_value!($name $variant $($target_value)?)),*
                }
            }
        }
    )+};
}
// All modifier-value enums. `#[default]` marks the value used when the
// modifier is omitted; a parenthesized value maps a variant onto a non-enum
// target type (e.g. `bool`).
modifier! {
    enum HourBase(bool) {
        Twelve(true) = b"12",
        #[default]
        TwentyFour(false) = b"24",
    }
    enum MonthCaseSensitive(bool) {
        False(false) = b"false",
        #[default]
        True(true) = b"true",
    }
    enum MonthRepr {
        #[default]
        Numerical = b"numerical",
        Long = b"long",
        Short = b"short",
    }
    enum Padding {
        Space = b"space",
        #[default]
        Zero = b"zero",
        None = b"none",
    }
    enum PeriodCase(bool) {
        Lower(false) = b"lower",
        #[default]
        Upper(true) = b"upper",
    }
    enum PeriodCaseSensitive(bool) {
        False(false) = b"false",
        #[default]
        True(true) = b"true",
    }
    enum SignBehavior(bool) {
        #[default]
        Automatic(false) = b"automatic",
        Mandatory(true) = b"mandatory",
    }
    enum SubsecondDigits {
        One = b"1",
        Two = b"2",
        Three = b"3",
        Four = b"4",
        Five = b"5",
        Six = b"6",
        Seven = b"7",
        Eight = b"8",
        Nine = b"9",
        #[default]
        OneOrMore = b"1+",
    }
    enum TrailingInput {
        #[default]
        Prohibit = b"prohibit",
        Discard = b"discard",
    }
    enum UnixTimestampPrecision {
        #[default]
        Second = b"second",
        Millisecond = b"millisecond",
        Microsecond = b"microsecond",
        Nanosecond = b"nanosecond",
    }
    enum WeekNumberRepr {
        #[default]
        Iso = b"iso",
        Sunday = b"sunday",
        Monday = b"monday",
    }
    enum WeekdayCaseSensitive(bool) {
        False(false) = b"false",
        #[default]
        True(true) = b"true",
    }
    enum WeekdayOneIndexed(bool) {
        False(false) = b"false",
        #[default]
        True(true) = b"true",
    }
    enum WeekdayRepr {
        Short = b"short",
        #[default]
        Long = b"long",
        Sunday = b"sunday",
        Monday = b"monday",
    }
    enum YearBase(bool) {
        #[default]
        Calendar(false) = b"calendar",
        IsoWeek(true) = b"iso_week",
    }
    enum YearRepr {
        #[default]
        Full = b"full",
        Century = b"century",
        LastTwo = b"last_two",
    }
    enum YearRange {
        Standard = b"standard",
        #[default]
        Extended = b"extended",
    }
}
/// Parse a modifier value via its `FromStr` implementation.
///
/// Returns `Ok(Some(parsed))` on success, or an "invalid modifier value"
/// error (spanned to the value) when the bytes are not UTF-8 or fail to
/// parse.
fn parse_from_modifier_value<T: FromStr>(value: &Spanned<&[u8]>) -> Result<Option<T>, Error> {
    str::from_utf8(value)
        .ok()
        .and_then(|val| val.parse::<T>().ok())
        // `Some` because modifier fields are stored as `Option` in the
        // component structs; idiomatic `.map(Some)` over a redundant closure.
        .map(Some)
        .ok_or_else(|| value.span.error("invalid modifier value"))
}

View File

@@ -0,0 +1,245 @@
use core::iter;
use super::{Error, Location, Spanned, SpannedValue};
/// A peekable stream of lexed tokens.
pub(super) struct Lexed<I: Iterator> {
    iter: iter::Peekable<I>,
}

impl<I: Iterator> Iterator for Lexed<I> {
    type Item = I::Item;
    fn next(&mut self) -> Option<Self::Item> {
        self.iter.next()
    }
}
impl<'iter, 'token: 'iter, I: Iterator<Item = Result<Token<'token>, Error>> + 'iter> Lexed<I> {
    /// Peek at the next token without consuming it.
    pub(super) fn peek(&mut self) -> Option<&I::Item> {
        self.iter.peek()
    }

    /// Consume and return the next token if it is a whitespace part.
    pub(super) fn next_if_whitespace(&mut self) -> Option<Spanned<&'token [u8]>> {
        if let Some(&Ok(Token::ComponentPart {
            kind: ComponentKind::Whitespace,
            value,
        })) = self.peek()
        {
            self.next(); // consume
            Some(value)
        } else {
            None
        }
    }

    /// Consume and return the next token if it is a non-whitespace part.
    pub(super) fn next_if_not_whitespace(&mut self) -> Option<Spanned<&'token [u8]>> {
        if let Some(&Ok(Token::ComponentPart {
            kind: ComponentKind::NotWhitespace,
            value,
        })) = self.peek()
        {
            self.next();
            Some(value)
        } else {
            None
        }
    }

    /// Consume and return the next token's location if it is `[`.
    pub(super) fn next_if_opening_bracket(&mut self) -> Option<Location> {
        if let Some(&Ok(Token::Bracket {
            kind: BracketKind::Opening,
            location,
        })) = self.peek()
        {
            self.next();
            Some(location)
        } else {
            None
        }
    }

    /// Peek at the next token's location if it is `]`, without consuming.
    pub(super) fn peek_closing_bracket(&'iter mut self) -> Option<&'iter Location> {
        if let Some(Ok(Token::Bracket {
            kind: BracketKind::Closing,
            location,
        })) = self.peek()
        {
            Some(location)
        } else {
            None
        }
    }

    /// Consume and return the next token's location if it is `]`.
    pub(super) fn next_if_closing_bracket(&mut self) -> Option<Location> {
        if let Some(&Ok(Token::Bracket {
            kind: BracketKind::Closing,
            location,
        })) = self.peek()
        {
            self.next();
            Some(location)
        } else {
            None
        }
    }
}
/// A token emitted by the lexer.
pub(super) enum Token<'a> {
    /// Text outside any brackets, passed through verbatim.
    Literal(Spanned<&'a [u8]>),
    /// An unescaped `[` or `]`.
    Bracket {
        kind: BracketKind,
        location: Location,
    },
    /// A run of bytes inside brackets, split at whitespace boundaries.
    ComponentPart {
        kind: ComponentKind,
        value: Spanned<&'a [u8]>,
    },
}

/// Whether a bracket opens or closes a component.
pub(super) enum BracketKind {
    Opening,
    Closing,
}

/// Whether a component part consists of whitespace.
pub(super) enum ComponentKind {
    Whitespace,
    NotWhitespace,
}
/// Pair every byte with its `Location` — the byte's index within the input
/// plus the originating proc-macro span.
fn attach_location<'item>(
    iter: impl Iterator<Item = &'item u8>,
    proc_span: proc_macro::Span,
) -> impl Iterator<Item = (&'item u8, Location)> {
    // A zipped counter range replaces a captured mutable position variable.
    iter.zip(0u32..).map(move |(byte, byte_pos)| {
        (
            byte,
            Location {
                byte: byte_pos,
                proc_span,
            },
        )
    })
}
/// Tokenize a format description.
///
/// Version differences: in version 1, `[[` escapes a literal `[`; from
/// version 2, backslash escapes (`\\`, `\[`, `\]`) are used instead.
pub(super) fn lex<const VERSION: u8>(
    mut input: &[u8],
    proc_span: proc_macro::Span,
) -> Lexed<impl Iterator<Item = Result<Token<'_>, Error>>> {
    assert!(version!(1..=2));
    // Bracket nesting depth; zero means we are in literal text.
    let mut depth: u32 = 0;
    let mut iter = attach_location(input.iter(), proc_span).peekable();
    // When a version-1 `[[` escape is seen, the second bracket's location is
    // stashed here and emitted as its own token on the following call.
    let mut second_bracket_location = None;
    let iter = iter::from_fn(move || {
        if version!(..=1)
            && let Some(location) = second_bracket_location.take()
        {
            return Some(Ok(Token::Bracket {
                kind: BracketKind::Opening,
                location,
            }));
        }
        Some(Ok(match iter.next()? {
            // Version 2 backslash escape: the escaped byte becomes a literal
            // (or a component part when inside brackets).
            (b'\\', backslash_loc) if version!(2..) => match iter.next() {
                Some((b'\\' | b'[' | b']', char_loc)) => {
                    let char = &input[1..2];
                    input = &input[2..];
                    if depth == 0 {
                        Token::Literal(char.spanned(backslash_loc.to(char_loc)))
                    } else {
                        Token::ComponentPart {
                            kind: ComponentKind::NotWhitespace,
                            value: char.spanned(backslash_loc.to(char_loc)),
                        }
                    }
                }
                Some((_, loc)) => {
                    return Some(Err(loc.error("invalid escape sequence")));
                }
                None => {
                    return Some(Err(backslash_loc.error("unexpected end of input")));
                }
            },
            // Version 1 opening bracket, possibly the `[[` escape.
            (b'[', location) if version!(..=1) => {
                if let Some((_, second_location)) = iter.next_if(|&(&byte, _)| byte == b'[') {
                    second_bracket_location = Some(second_location);
                    input = &input[2..];
                } else {
                    depth += 1;
                    input = &input[1..];
                }
                Token::Bracket {
                    kind: BracketKind::Opening,
                    location,
                }
            }
            (b'[', location) => {
                depth += 1;
                input = &input[1..];
                Token::Bracket {
                    kind: BracketKind::Opening,
                    location,
                }
            }
            // `]` only closes when inside brackets; at depth 0 it falls
            // through to the literal arm below.
            (b']', location) if depth > 0 => {
                depth -= 1;
                input = &input[1..];
                Token::Bracket {
                    kind: BracketKind::Closing,
                    location,
                }
            }
            // Outside brackets: accumulate a literal up to the next special
            // byte (`[` always; `\` only from version 2).
            (_, start_location) if depth == 0 => {
                let mut bytes = 1;
                let mut end_location = start_location;
                while let Some((_, location)) =
                    iter.next_if(|&(&byte, _)| !((version!(2..) && byte == b'\\') || byte == b'['))
                {
                    end_location = location;
                    bytes += 1;
                }
                let value = &input[..bytes];
                input = &input[bytes..];
                Token::Literal(value.spanned(start_location.to(end_location)))
            }
            // Inside brackets: accumulate a maximal run that is uniformly
            // whitespace or uniformly non-whitespace.
            (byte, start_location) => {
                let mut bytes = 1;
                let mut end_location = start_location;
                let is_whitespace = byte.is_ascii_whitespace();
                while let Some((_, location)) = iter.next_if(|&(byte, _)| {
                    !matches!(byte, b'\\' | b'[' | b']')
                        && is_whitespace == byte.is_ascii_whitespace()
                }) {
                    end_location = location;
                    bytes += 1;
                }
                let value = &input[..bytes];
                input = &input[bytes..];
                Token::ComponentPart {
                    kind: if is_whitespace {
                        ComponentKind::Whitespace
                    } else {
                        ComponentKind::NotWhitespace
                    },
                    value: value.spanned(start_location.to(end_location)),
                }
            }
        }))
    });
    Lexed {
        iter: iter.peekable(),
    }
}

View File

@@ -0,0 +1,166 @@
//! Parser for format descriptions.
/// Evaluate to `true` when the `VERSION` const generic in scope is contained
/// in the given range.
macro_rules! version {
    ($range:expr) => {
        $range.contains(&VERSION)
    };
}
mod ast;
mod format_item;
mod lexer;
mod public;
/// Parse a format description, defaulting to version 1 when no explicit
/// version was requested.
pub(crate) fn parse_with_version(
    version: Option<crate::FormatDescriptionVersion>,
    s: &[u8],
    proc_span: proc_macro::Span,
) -> Result<Vec<public::OwnedFormatItem>, crate::Error> {
    match version {
        Some(crate::FormatDescriptionVersion::V1) | None => parse::<1>(s, proc_span),
        Some(crate::FormatDescriptionVersion::V2) => parse::<2>(s, proc_span),
    }
}
fn parse<const VERSION: u8>(
s: &[u8],
proc_span: proc_macro::Span,
) -> Result<Vec<public::OwnedFormatItem>, crate::Error> {
let mut lexed = lexer::lex::<VERSION>(s, proc_span);
let ast = ast::parse::<_, VERSION>(&mut lexed);
let format_items = format_item::parse(ast);
Ok(format_items
.map(|res| res.map(Into::into))
.collect::<Result<_, _>>()?)
}
/// A position within the input: a byte index plus the proc-macro span the
/// input came from.
#[derive(Clone, Copy)]
struct Location {
    byte: u32,
    proc_span: proc_macro::Span,
}

impl Location {
    /// Create a span covering `self` through `end`.
    fn to(self, end: Self) -> Span {
        Span { start: self, end }
    }

    /// A copy of this location advanced by `offset` bytes.
    #[must_use = "this does not modify the original value"]
    fn offset(&self, offset: u32) -> Self {
        Self {
            byte: self.byte + offset,
            proc_span: self.proc_span,
        }
    }

    /// An error whose span is this single location.
    fn error(self, message: &'static str) -> Error {
        Error {
            message,
            _span: unused(Span {
                start: self,
                end: self,
            }),
            proc_span: self.proc_span,
        }
    }
}
#[derive(Clone, Copy)]
struct Span {
start: Location,
end: Location,
}
impl Span {
#[must_use = "this does not modify the original value"]
const fn shrink_to_start(&self) -> Self {
Self {
start: self.start,
end: self.start,
}
}
#[must_use = "this does not modify the original value"]
const fn shrink_to_end(&self) -> Self {
Self {
start: self.end,
end: self.end,
}
}
#[must_use = "this does not modify the original value"]
const fn shrink_to_before(&self, pos: u32) -> Self {
Self {
start: self.start,
end: Location {
byte: self.start.byte + pos - 1,
proc_span: self.start.proc_span,
},
}
}
#[must_use = "this does not modify the original value"]
fn shrink_to_after(&self, pos: u32) -> Self {
Self {
start: Location {
byte: self.start.byte + pos + 1,
proc_span: self.start.proc_span,
},
end: self.end,
}
}
fn error(self, message: &'static str) -> Error {
Error {
message,
_span: unused(self),
proc_span: self.start.proc_span,
}
}
}
#[derive(Clone, Copy)]
struct Spanned<T> {
value: T,
span: Span,
}
impl<T> core::ops::Deref for Spanned<T> {
type Target = T;
fn deref(&self) -> &Self::Target {
&self.value
}
}
trait SpannedValue: Sized {
fn spanned(self, span: Span) -> Spanned<Self>;
}
impl<T> SpannedValue for T {
fn spanned(self, span: Span) -> Spanned<Self> {
Spanned { value: self, span }
}
}
struct Error {
message: &'static str,
_span: Unused<Span>,
proc_span: proc_macro::Span,
}
impl From<Error> for crate::Error {
fn from(error: Error) -> Self {
Self::Custom {
message: error.message.into(),
span_start: Some(error.proc_span),
span_end: Some(error.proc_span),
}
}
}
struct Unused<T>(core::marker::PhantomData<T>);
fn unused<T>(_: T) -> Unused<T> {
Unused(core::marker::PhantomData)
}

View File

@@ -0,0 +1,50 @@
use proc_macro::{Ident, Span, TokenStream};
use super::modifier;
use crate::to_tokens::ToTokenStream;
/// Generate the `Component` enum — one variant per modifier type, each
/// wrapping its `modifier::*` payload — together with a `ToTokenStream` impl
/// that emits `Component::Name(modifier_tokens)`.
macro_rules! declare_component {
    ($($name:ident)*) => {
        pub(crate) enum Component {$(
            $name(modifier::$name),
        )*}
        impl ToTokenStream for Component {
            fn append_to(self, ts: &mut TokenStream) {
                // Render the modifier into its own stream, then wrap it in
                // the matching `Component::<Variant>(…)` constructor call.
                let mut mts = TokenStream::new();
                let component = match self {$(
                    Self::$name(modifier) => {
                        modifier.append_to(&mut mts);
                        stringify!($name)
                    }
                )*};
                let component = Ident::new(component, Span::mixed_site());
                quote_append! { ts
                    Component::#(component)(#S(mts))
                }
            }
        }
    };
}
// One variant per formattable/parsable component.
declare_component! {
    Day
    Month
    Ordinal
    Weekday
    WeekNumber
    Year
    Hour
    Minute
    Period
    Second
    Subsecond
    OffsetHour
    OffsetMinute
    OffsetSecond
    Ignore
    UnixTimestamp
    End
}

View File

@@ -0,0 +1,51 @@
mod component;
pub(super) mod modifier;
use proc_macro::TokenStream;
pub(crate) use self::component::Component;
use crate::to_tokens::ToTokenStream;
/// Owned counterpart of `time`'s `BorrowedFormatItem`, produced by the parser
/// and later emitted as tokens constructing the borrowed form.
pub(crate) enum OwnedFormatItem {
    Literal(Box<[u8]>),
    Component(Component),
    Compound(Box<[Self]>),
    Optional(Box<Self>),
    First(Box<[Self]>),
}
impl ToTokenStream for OwnedFormatItem {
    // Each variant is emitted as the corresponding `BorrowedFormatItem`
    // constructor; slices become `&[…]` so the result is usable in a const.
    fn append_to(self, ts: &mut TokenStream) {
        match self {
            Self::Literal(bytes) => quote_append! { ts
                BorrowedFormatItem::Literal(#(Literal::byte_string(bytes.as_ref())))
            },
            Self::Component(component) => quote_append! { ts
                BorrowedFormatItem::Component { 0: #S(component) }
            },
            Self::Compound(items) => {
                let items = items
                    .into_vec()
                    .into_iter()
                    .map(|item| quote_! { #S(item), })
                    .collect::<TokenStream>();
                quote_append! { ts
                    BorrowedFormatItem::Compound { 0: &[#S(items)] }
                }
            }
            Self::Optional(item) => quote_append! { ts
                BorrowedFormatItem::Optional { 0: &#S(*item) }
            },
            Self::First(items) => {
                let items = items
                    .into_vec()
                    .into_iter()
                    .map(|item| quote_! { #S(item), })
                    .collect::<TokenStream>();
                quote_append! { ts
                    BorrowedFormatItem::First { 0: &[#S(items)] }
                }
            }
        }
    }
}

View File

@@ -0,0 +1,277 @@
use std::num::NonZero;
use proc_macro::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
use crate::to_tokens::{ToTokenStream, ToTokenTree};
/// Declare a modifier struct or enum and derive its token-emission impl.
///
/// Struct form: each field carries a `= $default` pattern. The generated
/// `ToTokenTree` impl emits `Name::default()` when every field matches its
/// default, otherwise `Name::default()` followed by a `.with_<field>(value)`
/// builder call for each non-default field.
///
/// Enum form: the generated `ToTokenStream` impl emits `Name::Variant`.
macro_rules! to_tokens {
    (
        $(#[$struct_attr:meta])*
        $struct_vis:vis struct $struct_name:ident {$(
            $(#[$field_attr:meta])*
            $field_vis:vis $field_name:ident : $field_ty:ty = $default:pat
        ),* $(,)?}
    ) => {
        $(#[$struct_attr])*
        $struct_vis struct $struct_name {$(
            $(#[$field_attr])*
            $field_vis $field_name: $field_ty
        ),*}
        impl ToTokenTree for $struct_name {
            fn into_token_tree(self) -> TokenTree {
                let Self {$($field_name),*} = self;
                // Fast path: all fields at their defaults.
                #[allow(clippy::redundant_pattern_matching)]
                if matches!(($(&$field_name,)*), ($($default,)*)) {
                    return TokenTree::Group(Group::new(
                        Delimiter::None,
                        quote_! { $struct_name::default() }
                    ));
                }
                let mut tokens = quote_! {
                    $struct_name::default()
                };
                // Chain a builder call for every field that differs.
                $(
                    #[allow(clippy::redundant_pattern_matching)]
                    if !matches!($field_name, $default) {
                        let method_name = Ident::new(concat!("with_", stringify!($field_name)), Span::mixed_site());
                        quote_append!(tokens .#(method_name)(#S($field_name)));
                    }
                )*
                TokenTree::Group(Group::new(
                    Delimiter::Brace,
                    tokens,
                ))
            }
        }
    };
    (
        $(#[$enum_attr:meta])*
        $enum_vis:vis enum $enum_name:ident {$(
            $(#[$variant_attr:meta])*
            $variant_name:ident
        ),+ $(,)?}
    ) => {
        $(#[$enum_attr])*
        $enum_vis enum $enum_name {$(
            $(#[$variant_attr])*
            $variant_name
        ),+}
        impl ToTokenStream for $enum_name {
            fn append_to(self, ts: &mut TokenStream) {
                // Emit `EnumName::` then the variant ident.
                quote_append! { ts
                    $enum_name::
                };
                let name = match self {
                    $(Self::$variant_name => stringify!($variant_name)),+
                };
                ts.extend([TokenTree::Ident(Ident::new(name, Span::mixed_site()))]);
            }
        }
    }
}
// Modifier types mirroring `time::format_description::modifier`. The
// `= default` annotations must match the `Default` impls of the real types,
// since the generated code compares against them to emit minimal tokens.

// --- date components ---
to_tokens! {
    pub(crate) struct Day {
        pub(crate) padding: Padding = Padding::Zero,
    }
}
to_tokens! {
    pub(crate) enum MonthRepr {
        Numerical,
        Long,
        Short,
    }
}
to_tokens! {
    pub(crate) struct Month {
        pub(crate) padding: Padding = Padding::Zero,
        pub(crate) repr: MonthRepr = MonthRepr::Numerical,
        pub(crate) case_sensitive: bool = true,
    }
}
to_tokens! {
    pub(crate) struct Ordinal {
        pub(crate) padding: Padding = Padding::Zero,
    }
}
to_tokens! {
    pub(crate) enum WeekdayRepr {
        Short,
        Long,
        Sunday,
        Monday,
    }
}
to_tokens! {
    pub(crate) struct Weekday {
        pub(crate) repr: WeekdayRepr = WeekdayRepr::Long,
        pub(crate) one_indexed: bool = true,
        pub(crate) case_sensitive: bool = true,
    }
}
to_tokens! {
    pub(crate) enum WeekNumberRepr {
        Iso,
        Sunday,
        Monday,
    }
}
to_tokens! {
    pub(crate) struct WeekNumber {
        pub(crate) padding: Padding = Padding::Zero,
        pub(crate) repr: WeekNumberRepr = WeekNumberRepr::Iso,
    }
}
to_tokens! {
    pub(crate) enum YearRepr {
        Full,
        Century,
        LastTwo,
    }
}
to_tokens! {
    pub(crate) enum YearRange {
        Standard,
        Extended,
    }
}
to_tokens! {
    pub(crate) struct Year {
        pub(crate) padding: Padding = Padding::Zero,
        pub(crate) repr: YearRepr = YearRepr::Full,
        pub(crate) range: YearRange = YearRange::Extended,
        pub(crate) iso_week_based: bool = false,
        pub(crate) sign_is_mandatory: bool = false,
    }
}
// --- time components ---
to_tokens! {
    pub(crate) struct Hour {
        pub(crate) padding: Padding = Padding::Zero,
        pub(crate) is_12_hour_clock: bool = false,
    }
}
to_tokens! {
    pub(crate) struct Minute {
        pub(crate) padding: Padding = Padding::Zero,
    }
}
to_tokens! {
    pub(crate) struct Period {
        pub(crate) is_uppercase: bool = true,
        pub(crate) case_sensitive: bool = true,
    }
}
to_tokens! {
    pub(crate) struct Second {
        pub(crate) padding: Padding = Padding::Zero,
    }
}
to_tokens! {
    pub(crate) enum SubsecondDigits {
        One,
        Two,
        Three,
        Four,
        Five,
        Six,
        Seven,
        Eight,
        Nine,
        OneOrMore,
    }
}
to_tokens! {
    pub(crate) struct Subsecond {
        pub(crate) digits: SubsecondDigits = SubsecondDigits::OneOrMore,
    }
}
// --- UTC offset components ---
to_tokens! {
    pub(crate) struct OffsetHour {
        pub(crate) sign_is_mandatory: bool = false,
        pub(crate) padding: Padding = Padding::Zero,
    }
}
to_tokens! {
    pub(crate) struct OffsetMinute {
        pub(crate) padding: Padding = Padding::Zero,
    }
}
to_tokens! {
    pub(crate) struct OffsetSecond {
        pub(crate) padding: Padding = Padding::Zero,
    }
}
to_tokens! {
    pub(crate) enum Padding {
        Space,
        Zero,
        None,
    }
}
// `Ignore` has no `Default`, so it cannot use the `to_tokens!` struct form;
// it is always emitted as `Ignore::count(n)`.
pub(crate) struct Ignore {
    pub(crate) count: NonZero<u16>,
}
impl ToTokenTree for Ignore {
    fn into_token_tree(self) -> TokenTree {
        quote_group! {{
            Ignore::count(#(self.count))
        }}
    }
}
to_tokens! {
    pub(crate) enum UnixTimestampPrecision {
        Second,
        Millisecond,
        Microsecond,
        Nanosecond,
    }
}
to_tokens! {
    pub(crate) struct UnixTimestamp {
        pub(crate) precision: UnixTimestampPrecision = UnixTimestampPrecision::Second,
        pub(crate) sign_is_mandatory: bool = false,
    }
}
to_tokens! {
    pub(crate) enum TrailingInput {
        Prohibit,
        Discard,
    }
}
to_tokens! {
    pub(crate) struct End {
        pub(crate) trailing_input: TrailingInput = TrailingInput::Prohibit,
    }
}

131
vendor/time-macros/src/helpers/mod.rs vendored Normal file
View File

@@ -0,0 +1,131 @@
#[cfg(any(feature = "formatting", feature = "parsing"))]
mod string;
use std::iter::Peekable;
use std::str::FromStr;
use num_conv::prelude::*;
use proc_macro::{Span, TokenTree, token_stream};
use time_core::util::{days_in_year, is_leap_year};
use crate::Error;
/// Extract exactly one string literal from `tokens`, returning its span and
/// decoded bytes. Errors if the iterator is empty, the single token is not a
/// string literal, or more than one token is present.
#[cfg(any(feature = "formatting", feature = "parsing"))]
pub(crate) fn get_string_literal(
    mut tokens: impl Iterator<Item = TokenTree>,
) -> Result<(Span, Vec<u8>), Error> {
    match (tokens.next(), tokens.next()) {
        (Some(TokenTree::Literal(literal)), None) => string::parse(&literal),
        (Some(tree), None) => Err(Error::ExpectedString {
            span_start: Some(tree.span()),
            span_end: Some(tree.span()),
        }),
        (_, Some(tree)) => Err(Error::UnexpectedToken { tree }),
        (None, None) => Err(Error::ExpectedString {
            span_start: None,
            span_end: None,
        }),
    }
}
/// Consume the next token and parse it as a number of type `T`.
///
/// Underscore digit separators are stripped before parsing. On failure the
/// error names `component_name` (e.g. "year") and points at the literal.
pub(crate) fn consume_number<T: FromStr>(
    component_name: &'static str,
    chars: &mut Peekable<token_stream::IntoIter>,
) -> Result<(Span, T), Error> {
    let (span, digits) = match chars.next() {
        Some(TokenTree::Literal(literal)) => (literal.span(), literal.to_string()),
        Some(tree) => return Err(Error::UnexpectedToken { tree }),
        None => return Err(Error::UnexpectedEndOfInput),
    };
    if let Ok(value) = digits.replace('_', "").parse() {
        Ok((span, value))
    } else {
        Err(Error::InvalidComponent {
            name: component_name,
            value: digits,
            span_start: Some(span),
            span_end: Some(span),
        })
    }
}
/// Consume the next token if it is an identifier matching any of `idents`,
/// returning its span. The token is only consumed on success (peek-first).
pub(crate) fn consume_any_ident(
    idents: &[&str],
    chars: &mut Peekable<token_stream::IntoIter>,
) -> Result<Span, Error> {
    match chars.peek() {
        Some(TokenTree::Ident(char)) if idents.contains(&char.to_string().as_str()) => {
            let ret = Ok(char.span());
            drop(chars.next());
            ret
        }
        Some(tree) => Err(Error::UnexpectedToken { tree: tree.clone() }),
        None => Err(Error::UnexpectedEndOfInput),
    }
}
/// Consume the next token if it is the punctuation character `c`, returning
/// its span. The token is only consumed on success (peek-first).
pub(crate) fn consume_punct(
    c: char,
    chars: &mut Peekable<token_stream::IntoIter>,
) -> Result<Span, Error> {
    match chars.peek() {
        Some(TokenTree::Punct(punct)) if *punct == c => {
            let ret = Ok(punct.span());
            drop(chars.next());
            ret
        }
        Some(tree) => Err(Error::UnexpectedToken { tree: tree.clone() }),
        None => Err(Error::UnexpectedEndOfInput),
    }
}
/// Weekday of the `ordinal`-th day of January in `year`, as a number in
/// `0..7` (the `+ 6` offset suggests 0 = Monday — TODO confirm against
/// callers; `ywd_to_yo` uses `jan_weekday(year, 4)`, i.e. January 4th).
fn jan_weekday(year: i32, ordinal: i32) -> u8 {
    // Floor division (rounds toward negative infinity), needed because
    // Rust's `/` truncates toward zero and `year` may be negative.
    macro_rules! div_floor {
        ($a:expr, $b:expr) => {{
            let (_quotient, _remainder) = ($a / $b, $a % $b);
            if (_remainder > 0 && $b < 0) || (_remainder < 0 && $b > 0) {
                _quotient - 1
            } else {
                _quotient
            }
        }};
    }
    // Gregorian leap-year correction terms: +1 day per year, plus the
    // century/quadricentennial leap-day adjustments.
    let adj_year = year - 1;
    (ordinal + adj_year + div_floor!(adj_year, 4) - div_floor!(adj_year, 100)
        + div_floor!(adj_year, 400)
        + 6)
    .rem_euclid(7)
    .cast_unsigned()
    .truncate()
}
/// Number of days in `month` (1-indexed) of `year`, accounting for
/// leap-year February.
pub(crate) fn days_in_year_month(year: i32, month: u8) -> u8 {
    const DAYS: [u8; 12] = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31];
    let base = DAYS[month.extend::<usize>() - 1];
    if month == 2 && is_leap_year(year) {
        base + 1
    } else {
        base
    }
}
/// Convert an ISO year / ISO week / ISO weekday number (1 = Monday) to a
/// (year, ordinal-day) pair, handling dates that spill into the previous or
/// next calendar year.
pub(crate) fn ywd_to_yo(year: i32, week: u8, iso_weekday_number: u8) -> (i32, u16) {
    // Anchor on January 4th, which is always in ISO week 1.
    let (ordinal, overflow) = (u16::from(week) * 7 + u16::from(iso_weekday_number))
        .overflowing_sub(u16::from(jan_weekday(year, 4)) + 4);
    if overflow || ordinal == 0 {
        // Date falls at the end of the previous calendar year.
        return (year - 1, (ordinal.wrapping_add(days_in_year(year - 1))));
    }
    let days_in_cur_year = days_in_year(year);
    if ordinal > days_in_cur_year {
        // Date falls at the start of the next calendar year.
        (year + 1, ordinal - days_in_cur_year)
    } else {
        (year, ordinal)
    }
}
/// Convert a year / month (1-indexed) / day-of-month to a (year, ordinal-day)
/// pair via a cumulative-days table plus a leap-day adjustment after February.
pub(crate) fn ymd_to_yo(year: i32, month: u8, day: u8) -> (i32, u16) {
    let ordinal = [0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334]
        [month.extend::<usize>() - 1]
        + u16::from(month > 2 && is_leap_year(year));
    (year, ordinal + u16::from(day))
}

188
vendor/time-macros/src/helpers/string.rs vendored Normal file
View File

@@ -0,0 +1,188 @@
use std::ops::{Index, RangeFrom};
use proc_macro::Span;
use crate::Error;
/// Decode a (byte-)string literal token into its raw bytes.
///
/// Dispatches on the literal's source prefix: `"…"` cooked string,
/// `b"…"` cooked byte string, `r"…"`/`br"…"` raw (byte) string. Any other
/// literal kind is rejected with `ExpectedString`.
pub(crate) fn parse(token: &proc_macro::Literal) -> Result<(Span, Vec<u8>), Error> {
    let span = token.span();
    let repr = token.to_string();
    match repr.as_bytes() {
        [b'"', ..] => Ok((span, parse_lit_str_cooked(&repr[1..]))),
        [b'b', b'"', rest @ ..] => Ok((span, parse_lit_byte_str_cooked(rest))),
        [b'r', rest @ ..] | [b'b', b'r', rest @ ..] => Ok((span, parse_lit_str_raw(rest))),
        _ => Err(Error::ExpectedString {
            span_start: Some(span),
            span_end: Some(span),
        }),
    }
}
/// Byte of `s` at `idx`, or `0` when `idx` is out of bounds.
fn byte(s: impl AsRef<[u8]>, idx: usize) -> u8 {
    match s.as_ref().get(idx) {
        Some(&b) => b,
        None => 0,
    }
}
/// Decode a cooked string literal body (everything after the opening `"`),
/// resolving escape sequences, and return its UTF-8 bytes.
///
/// The literal has already been validated by the compiler, so unknown
/// escapes are internal errors (`bug!`) rather than user-facing ones.
fn parse_lit_str_cooked(mut s: &str) -> Vec<u8> {
    let mut content = String::new();
    'outer: loop {
        let ch = match byte(s, 0) {
            // Closing quote ends the literal body.
            b'"' => break,
            b'\\' => {
                let b = byte(s, 1);
                s = &s[2..];
                match b {
                    b'x' => {
                        let (byte, rest) = backslash_x(s);
                        s = rest;
                        char::from_u32(u32::from(byte)).expect("byte was just validated")
                    }
                    b'u' => {
                        let (chr, rest) = backslash_u(s);
                        s = rest;
                        chr
                    }
                    b'n' => '\n',
                    b'r' => '\r',
                    b't' => '\t',
                    b'\\' => '\\',
                    b'0' => '\0',
                    b'\'' => '\'',
                    b'"' => '"',
                    // Line-continuation escape: a backslash before a newline
                    // swallows the newline and all following whitespace.
                    b'\r' | b'\n' => loop {
                        let ch = s.chars().next().unwrap_or_default();
                        if ch.is_whitespace() {
                            s = &s[ch.len_utf8()..];
                        } else {
                            continue 'outer;
                        }
                    },
                    _ => bug!("invalid escape"),
                }
            }
            b'\r' => {
                // bare CR not permitted; the compiler only passes CRLF
                // through, so skip both bytes and normalize to LF.
                s = &s[2..];
                '\n'
            }
            _ => {
                let ch = s.chars().next().unwrap_or_default();
                s = &s[ch.len_utf8()..];
                ch
            }
        };
        content.push(ch);
    }
    content.into_bytes()
}
/// Decode a raw (byte-)string literal body: everything after the `r`/`br`
/// prefix, i.e. `#*"…"#*`. Raw literals contain no escapes, so this just
/// strips the `#` fence and the quotes.
fn parse_lit_str_raw(s: &[u8]) -> Vec<u8> {
    // Count the leading `#`s; the opening quote follows them.
    let mut pounds = 0;
    while byte(s, pounds) == b'#' {
        pounds += 1;
    }
    let close = s
        .iter()
        .rposition(|&b| b == b'"')
        .expect("had a string without trailing \"");
    s[pounds + 1..close].to_owned()
}
/// Decode a cooked byte-string literal body (everything after `b"`),
/// resolving escape sequences, and return the raw bytes.
///
/// Same structure as `parse_lit_str_cooked`, but operates on bytes and has
/// no `\u` escape (byte strings cannot contain one).
fn parse_lit_byte_str_cooked(mut v: &[u8]) -> Vec<u8> {
    let mut out = Vec::new();
    'outer: loop {
        let byte = match byte(v, 0) {
            // Closing quote ends the literal body.
            b'"' => break,
            b'\\' => {
                let b = byte(v, 1);
                v = &v[2..];
                match b {
                    b'x' => {
                        let (byte, rest) = backslash_x(v);
                        v = rest;
                        byte
                    }
                    b'n' => b'\n',
                    b'r' => b'\r',
                    b't' => b'\t',
                    b'\\' => b'\\',
                    b'0' => b'\0',
                    b'\'' => b'\'',
                    b'"' => b'"',
                    // Line-continuation escape: swallow the newline and all
                    // following whitespace.
                    b'\r' | b'\n' => loop {
                        let byte = byte(v, 0);
                        let ch = char::from_u32(u32::from(byte)).expect("invalid byte");
                        if ch.is_whitespace() {
                            v = &v[1..];
                        } else {
                            continue 'outer;
                        }
                    },
                    _ => bug!("invalid escape"),
                }
            }
            b'\r' => {
                // bare CR not permitted; the compiler only passes CRLF
                // through, so skip both bytes and normalize to LF.
                v = &v[2..];
                b'\n'
            }
            b => {
                v = &v[1..];
                b
            }
        };
        out.push(byte);
    }
    out
}
/// Decode the two hex digits of a `\x..` escape, returning the byte value
/// and the input remaining after the digits.
///
/// Both nibbles accept the full hex range `0-9a-fA-F`: while `\x` in `str`
/// literals is limited to `\x00`–`\x7F`, byte-string literals (`b"\xFF"`)
/// may use `8`–`F` as the first digit too. The previous code computed the
/// high nibble as `b0 - b'0'` only, which miscomputes (and can overflow
/// `u8`) for letter digits.
fn backslash_x<S>(s: &S) -> (u8, &S)
where
    S: Index<RangeFrom<usize>, Output = S> + AsRef<[u8]> + ?Sized,
{
    let b0 = byte(s, 0);
    let b1 = byte(s, 1);
    let hi = match b0 {
        b'0'..=b'9' => b0 - b'0',
        b'a'..=b'f' => 10 + (b0 - b'a'),
        b'A'..=b'F' => 10 + (b0 - b'A'),
        _ => bug!("invalid hex escape"),
    };
    let lo = match b1 {
        b'0'..=b'9' => b1 - b'0',
        b'a'..=b'f' => 10 + (b1 - b'a'),
        b'A'..=b'F' => 10 + (b1 - b'A'),
        _ => bug!("invalid hex escape"),
    };
    // hi, lo <= 15, so this cannot overflow u8.
    (hi * 0x10 + lo, &s[2..])
}
/// Decode a `\u{…}` escape: `s` starts at the `{`. Returns the decoded
/// character and the input remaining after the closing `}`.
///
/// Underscore separators are permitted between digits, matching Rust's
/// literal grammar; invalid input is an internal error since the compiler
/// has already validated the literal.
fn backslash_u(mut s: &str) -> (char, &str) {
    // Skip the opening `{`.
    s = &s[1..];
    let mut ch = 0;
    let mut digits = 0;
    loop {
        let b = byte(s, 0);
        let digit = match b {
            b'0'..=b'9' => b - b'0',
            b'a'..=b'f' => 10 + b - b'a',
            b'A'..=b'F' => 10 + b - b'A',
            // `_` separator allowed after at least one digit.
            b'_' if digits > 0 => {
                s = &s[1..];
                continue;
            }
            b'}' if digits != 0 => break,
            _ => bug!("invalid unicode escape"),
        };
        ch *= 0x10;
        ch += u32::from(digit);
        digits += 1;
        s = &s[1..];
    }
    // Skip the closing `}`.
    s = &s[1..];
    (
        char::from_u32(ch).expect("invalid unicode escape passed by compiler"),
        s,
    )
}

280
vendor/time-macros/src/lib.rs vendored Normal file
View File

@@ -0,0 +1,280 @@
#![allow(
clippy::missing_const_for_fn,
clippy::std_instead_of_core,
clippy::std_instead_of_alloc,
clippy::alloc_instead_of_core,
reason = "irrelevant for proc macros"
)]
#![allow(
clippy::missing_docs_in_private_items,
missing_docs,
reason = "may be removed eventually"
)]
/// Panic for conditions that indicate a bug in this crate (not user error).
/// The no-argument form is a compile error so every call site must explain
/// what went wrong.
#[allow(
    unused_macros,
    reason = "may not be used for all feature flag combinations"
)]
macro_rules! bug {
    () => { compile_error!("provide an error message to help fix a possible bug") };
    ($descr:literal $($rest:tt)?) => {
        unreachable!(concat!("internal error: ", $descr) $($rest)?)
    }
}
#[macro_use]
mod quote;
mod date;
mod datetime;
mod error;
#[cfg(any(feature = "formatting", feature = "parsing"))]
mod format_description;
mod helpers;
mod offset;
#[cfg(all(feature = "serde", any(feature = "formatting", feature = "parsing")))]
mod serde_format_description;
mod time;
mod to_tokens;
mod utc_datetime;
#[cfg(any(feature = "formatting", feature = "parsing"))]
use std::iter::Peekable;
#[cfg(all(feature = "serde", any(feature = "formatting", feature = "parsing")))]
use proc_macro::Delimiter;
use proc_macro::TokenStream;
#[cfg(any(feature = "formatting", feature = "parsing"))]
use proc_macro::TokenTree;
use self::error::Error;
/// Define one `#[proc_macro]` entry point per listed name. Each macro parses
/// its input with the like-named module's `parse` function, errors on any
/// trailing tokens, and emits the value inside a `const { … }` block so the
/// construction is checked at compile time.
macro_rules! impl_macros {
    ($($name:ident)*) => {$(
        #[proc_macro]
        pub fn $name(input: TokenStream) -> TokenStream {
            use crate::to_tokens::ToTokenStream;
            let mut iter = input.into_iter().peekable();
            match $name::parse(&mut iter) {
                Ok(value) => match iter.peek() {
                    // Any leftover token after a successful parse is an error.
                    Some(tree) => Error::UnexpectedToken { tree: tree.clone() }.to_compile_error(),
                    None => quote_! { const { #S(value.into_token_stream()) } },
                },
                Err(err) => err.to_compile_error(),
            }
        }
    )*};
}
impl_macros![date datetime utc_datetime offset time];
/// Shorthand for the peekable token iterator threaded through the parsers.
#[cfg(any(feature = "formatting", feature = "parsing"))]
type PeekableTokenStreamIter = Peekable<proc_macro::token_stream::IntoIter>;
/// Format description syntax version selected via `version = N` in the
/// macro input.
#[cfg(any(feature = "formatting", feature = "parsing"))]
enum FormatDescriptionVersion {
    V1,
    V2,
}
/// Parse an optional leading `version = 1,` / `version = 2,` from the macro
/// input, returning `None` when absent.
///
/// `NO_EQUALS_IS_MOD_NAME` controls disambiguation: in
/// `serde_format_description!` a bare `version` identifier (no `=`) is a
/// legitimate module name, so the consumed ident is pushed back onto the
/// iterator and `None` is returned instead of an error.
#[cfg(any(feature = "formatting", feature = "parsing"))]
fn parse_format_description_version<const NO_EQUALS_IS_MOD_NAME: bool>(
    iter: &mut PeekableTokenStreamIter,
) -> Result<Option<FormatDescriptionVersion>, Error> {
    // Empty input means different things to the two callers: a missing
    // module name vs. a missing format string.
    let end_of_input_err = || {
        if NO_EQUALS_IS_MOD_NAME {
            Error::UnexpectedEndOfInput
        } else {
            Error::ExpectedString {
                span_start: None,
                span_end: None,
            }
        }
    };
    let version_ident = match iter.peek().ok_or_else(end_of_input_err)? {
        version @ TokenTree::Ident(ident) if ident.to_string() == "version" => {
            let version_ident = version.clone();
            iter.next(); // consume `version`
            version_ident
        }
        _ => return Ok(None),
    };
    match iter.peek() {
        Some(TokenTree::Punct(punct)) if punct.as_char() == '=' => iter.next(),
        _ if NO_EQUALS_IS_MOD_NAME => {
            // Push the `version` ident to the front of the iterator.
            *iter = std::iter::once(version_ident)
                .chain(iter.clone())
                .collect::<TokenStream>()
                .into_iter()
                .peekable();
            return Ok(None);
        }
        Some(token) => {
            return Err(Error::Custom {
                message: "expected `=`".into(),
                span_start: Some(token.span()),
                span_end: Some(token.span()),
            });
        }
        None => {
            return Err(Error::Custom {
                message: "expected `=`".into(),
                span_start: None,
                span_end: None,
            });
        }
    };
    let version_literal = match iter.next() {
        Some(TokenTree::Literal(literal)) => literal,
        Some(token) => {
            return Err(Error::Custom {
                message: "expected 1 or 2".into(),
                span_start: Some(token.span()),
                span_end: Some(token.span()),
            });
        }
        None => {
            return Err(Error::Custom {
                message: "expected 1 or 2".into(),
                span_start: None,
                span_end: None,
            });
        }
    };
    let version = match version_literal.to_string().as_str() {
        "1" => FormatDescriptionVersion::V1,
        "2" => FormatDescriptionVersion::V2,
        _ => {
            return Err(Error::Custom {
                message: "invalid format description version".into(),
                span_start: Some(version_literal.span()),
                span_end: Some(version_literal.span()),
            });
        }
    };
    // Trailing comma separating the version from the rest of the input.
    helpers::consume_punct(',', iter)?;
    Ok(Some(version))
}
/// Parse an optional visibility qualifier (`pub` or `pub(...)`) from the
/// macro input, returning it verbatim as tokens. Absence yields an empty
/// stream (inherited/private visibility).
#[cfg(all(feature = "serde", any(feature = "formatting", feature = "parsing")))]
fn parse_visibility(iter: &mut PeekableTokenStreamIter) -> Result<TokenStream, Error> {
    let mut visibility = match iter.peek().ok_or(Error::UnexpectedEndOfInput)? {
        pub_ident @ TokenTree::Ident(ident) if ident.to_string() == "pub" => {
            let visibility = quote_! { #(pub_ident.clone()) };
            iter.next(); // consume `pub`
            visibility
        }
        _ => return Ok(quote_! {}),
    };
    // Optional restriction path, e.g. `pub(crate)` / `pub(in some::path)`.
    match iter.peek().ok_or(Error::UnexpectedEndOfInput)? {
        group @ TokenTree::Group(path) if path.delimiter() == Delimiter::Parenthesis => {
            visibility.extend(std::iter::once(group.clone()));
            iter.next(); // consume parentheses and path
        }
        _ => {}
    }
    Ok(visibility)
}
/// The `format_description!` proc macro: parse an optional `version = N,`
/// prefix and a string literal, then emit a `const`-evaluated
/// `StaticFormatDescription` slice of format items.
#[cfg(any(feature = "formatting", feature = "parsing"))]
#[proc_macro]
pub fn format_description(input: TokenStream) -> TokenStream {
    // Closure lets `?` collect all error paths into one compile error.
    (|| {
        let mut input = input.into_iter().peekable();
        let version = parse_format_description_version::<false>(&mut input)?;
        let (span, string) = helpers::get_string_literal(input)?;
        let items = format_description::parse_with_version(version, &string, span)?;
        Ok(quote_! {
            const {
                use ::time::format_description::{*, modifier::*};
                &[#S(
                    items
                        .into_iter()
                        .map(|item| quote_! { #S(item), })
                        .collect::<TokenStream>()
                )] as StaticFormatDescription
            }
        })
    })()
    .unwrap_or_else(|err: Error| err.to_compile_error())
}
/// The `serde_format_description!` proc macro: generate a module of serde
/// (de)serializers for a `time` type using a given format description.
///
/// Input grammar: `[version = N,] [vis] mod_name, Type, <format string or
/// path to a format description>`.
#[cfg(all(feature = "serde", any(feature = "formatting", feature = "parsing")))]
#[proc_macro]
pub fn serde_format_description(input: TokenStream) -> TokenStream {
    (|| {
        let mut tokens = input.into_iter().peekable();
        // First, the optional format description version.
        let version = parse_format_description_version::<true>(&mut tokens)?;
        // Then, the visibility of the module.
        let visibility = parse_visibility(&mut tokens)?;
        // Next, an identifier (the desired module name)
        let mod_name = match tokens.next() {
            Some(TokenTree::Ident(ident)) => Ok(ident),
            Some(tree) => Err(Error::UnexpectedToken { tree }),
            None => Err(Error::UnexpectedEndOfInput),
        }?;
        // Followed by a comma
        helpers::consume_punct(',', &mut tokens)?;
        // Then, the type to create serde serializers for (e.g., `OffsetDateTime`).
        let formattable = match tokens.next() {
            Some(tree @ TokenTree::Ident(_)) => Ok(tree),
            Some(tree) => Err(Error::UnexpectedToken { tree }),
            None => Err(Error::UnexpectedEndOfInput),
        }?;
        // Another comma
        helpers::consume_punct(',', &mut tokens)?;
        // We now have two options. The user can either provide a format description as a string or
        // they can provide a path to a format description. If the latter, all remaining tokens are
        // assumed to be part of the path.
        let (format, format_description_display) = match tokens.peek() {
            // string literal: parse it now so errors surface here, and emit
            // the parsed items as a const slice.
            Some(TokenTree::Literal(_)) => {
                let (span, format_string) = helpers::get_string_literal(tokens)?;
                let items = format_description::parse_with_version(version, &format_string, span)?;
                let items: TokenStream = items
                    .into_iter()
                    .map(|item| quote_! { #S(item), })
                    .collect();
                let items = quote_! {
                    const {
                        use ::time::format_description::{*, modifier::*};
                        &[#S(items)] as StaticFormatDescription
                    }
                };
                (items, String::from_utf8_lossy(&format_string).into_owned())
            }
            // path: forwarded verbatim; its string form is used in the
            // generated `expecting` message.
            Some(_) => {
                let tokens = tokens.collect::<TokenStream>();
                let tokens_string = tokens.to_string();
                (tokens, tokens_string)
            }
            None => return Err(Error::UnexpectedEndOfInput),
        };
        Ok(serde_format_description::build(
            visibility,
            mod_name,
            formattable,
            format,
            format_description_display,
        ))
    })()
    .unwrap_or_else(|err: Error| err.to_compile_error_standalone())
}

97
vendor/time-macros/src/offset.rs vendored Normal file
View File

@@ -0,0 +1,97 @@
use std::iter::Peekable;
use proc_macro::{Span, TokenStream, token_stream};
use time_core::convert::*;
use crate::Error;
use crate::helpers::{consume_any_ident, consume_number, consume_punct};
use crate::to_tokens::ToTokenStream;
/// Components of a parsed `offset!()` literal. All three fields share the
/// sign of the offset.
pub(crate) struct Offset {
    pub(crate) hours: i8,
    pub(crate) minutes: i8,
    pub(crate) seconds: i8,
}
/// Parse the input of `offset!()`: either `utc`/`UTC`, or a signed
/// `±H[:MM[:SS]]` value.
pub(crate) fn parse(chars: &mut Peekable<token_stream::IntoIter>) -> Result<Offset, Error> {
    if consume_any_ident(&["utc", "UTC"], chars).is_ok() {
        return Ok(Offset {
            hours: 0,
            minutes: 0,
            seconds: 0,
        });
    }
    // An explicit sign is mandatory for numeric offsets.
    let sign = if consume_punct('+', chars).is_ok() {
        1
    } else if consume_punct('-', chars).is_ok() {
        -1
    } else if let Some(tree) = chars.next() {
        return Err(Error::UnexpectedToken { tree });
    } else {
        return Err(Error::MissingComponent {
            name: "sign",
            span_start: None,
            span_end: None,
        });
    };
    let (hours_span, hours) = consume_number::<i8>("hour", chars)?;
    // Minutes/seconds default to 0 with placeholder spans when omitted.
    let (mut minutes_span, mut minutes) = (Span::mixed_site(), 0);
    let (mut seconds_span, mut seconds) = (Span::mixed_site(), 0);
    if consume_punct(':', chars).is_ok() {
        let min = consume_number::<i8>("minute", chars)?;
        minutes_span = min.0;
        minutes = min.1;
        if consume_punct(':', chars).is_ok() {
            let sec = consume_number::<i8>("second", chars)?;
            seconds_span = sec.0;
            seconds = sec.1;
        }
    }
    // Range checks: hours may not exceed 25 (the maximum magnitude the
    // constructed `UtcOffset` accepts); minutes/seconds use the
    // units-per-larger-unit constants from `time_core::convert`.
    if hours > 25 {
        Err(Error::InvalidComponent {
            name: "hour",
            value: hours.to_string(),
            span_start: Some(hours_span.start()),
            span_end: Some(hours_span.end()),
        })
    } else if minutes >= Minute::per_t(Hour) {
        Err(Error::InvalidComponent {
            name: "minute",
            value: minutes.to_string(),
            span_start: Some(minutes_span.start()),
            span_end: Some(minutes_span.end()),
        })
    } else if seconds >= Second::per_t(Minute) {
        Err(Error::InvalidComponent {
            name: "second",
            value: seconds.to_string(),
            span_start: Some(seconds_span.start()),
            span_end: Some(seconds_span.end()),
        })
    } else {
        Ok(Offset {
            hours: sign * hours,
            minutes: sign * minutes,
            seconds: sign * seconds,
        })
    }
}
impl ToTokenStream for Offset {
    // Emit `unsafe { UtcOffset::__from_hms_unchecked(h, m, s) }`; the range
    // checks in `parse` above are what make the unchecked call sound.
    fn append_to(self, ts: &mut TokenStream) {
        quote_append! { ts
            unsafe {
                ::time::UtcOffset::__from_hms_unchecked(
                    #(self.hours),
                    #(self.minutes),
                    #(self.seconds),
                )
            }
        }
    }
}

145
vendor/time-macros/src/quote.rs vendored Normal file
View File

@@ -0,0 +1,145 @@
/// Build a fresh `TokenStream` from quasi-quoted tokens (see `quote_inner!`
/// for the supported grammar).
macro_rules! quote_ {
    () => (proc_macro::TokenStream::new());
    ($($x:tt)*) => {{
        use proc_macro::*;
        let mut ts = TokenStream::new();
        let ts_mut = &mut ts;
        quote_inner!(ts_mut $($x)*);
        ts
    }};
}
/// Append quasi-quoted tokens to an existing `TokenStream` binding.
macro_rules! quote_append {
    ($ts:ident $($x:tt)*) => {{
        use proc_macro::*;
        quote_inner!($ts $($x)*);
    }};
}
/// Build a single brace-delimited `TokenTree::Group` from quoted tokens.
macro_rules! quote_group {
    ({ $($x:tt)* }) => {{
        use proc_macro::*;
        TokenTree::Group(Group::new(
            Delimiter::Brace,
            quote_!($($x)*)
        ))
    }};
}
/// Append a one- or two-character punctuation token. The two-character form
/// joins the first character to the second (e.g. `::`, `->`).
macro_rules! sym {
    ($ts:ident $x:tt $y:tt) => {
        $ts.extend([
            TokenTree::from(Punct::new($x, Spacing::Joint)),
            TokenTree::from(Punct::new($y, Spacing::Alone)),
        ]);
    };
    ($ts:ident $x:tt) => {
        $ts.extend([TokenTree::from(Punct::new($x, Spacing::Alone))]);
    };
}
/// Recursive worker for the quoting macros: consume one leading token-tree
/// pattern, append the corresponding `proc_macro` tokens to `$ts`, and
/// recurse on the tail. All generated identifiers use `Span::mixed_site()`
/// for hygiene.
#[allow(unused_macro_rules)] // Varies by feature flag combination.
macro_rules! quote_inner {
    // Base case
    ($ts:ident) => {};
    // Single or double symbols
    ($ts:ident :: $($tail:tt)*) => { sym!($ts ':' ':'); quote_inner!($ts $($tail)*); };
    ($ts:ident : $($tail:tt)*) => { sym!($ts ':'); quote_inner!($ts $($tail)*); };
    ($ts:ident = $($tail:tt)*) => { sym!($ts '='); quote_inner!($ts $($tail)*); };
    ($ts:ident ; $($tail:tt)*) => { sym!($ts ';'); quote_inner!($ts $($tail)*); };
    ($ts:ident , $($tail:tt)*) => { sym!($ts ','); quote_inner!($ts $($tail)*); };
    ($ts:ident . $($tail:tt)*) => { sym!($ts '.'); quote_inner!($ts $($tail)*); };
    ($ts:ident & $($tail:tt)*) => { sym!($ts '&'); quote_inner!($ts $($tail)*); };
    ($ts:ident < $($tail:tt)*) => { sym!($ts '<'); quote_inner!($ts $($tail)*); };
    ($ts:ident >> $($tail:tt)*) => { sym!($ts '>' '>'); quote_inner!($ts $($tail)*); };
    ($ts:ident > $($tail:tt)*) => { sym!($ts '>'); quote_inner!($ts $($tail)*); };
    ($ts:ident -> $($tail:tt)*) => { sym!($ts '-' '>'); quote_inner!($ts $($tail)*); };
    ($ts:ident ? $($tail:tt)*) => { sym!($ts '?'); quote_inner!($ts $($tail)*); };
    ($ts:ident ! $($tail:tt)*) => { sym!($ts '!'); quote_inner!($ts $($tail)*); };
    ($ts:ident | $($tail:tt)*) => { sym!($ts '|'); quote_inner!($ts $($tail)*); };
    ($ts:ident * $($tail:tt)*) => { sym!($ts '*'); quote_inner!($ts $($tail)*); };
    ($ts:ident + $($tail:tt)*) => { sym!($ts '+'); quote_inner!($ts $($tail)*); };
    // Identifier
    ($ts:ident $i:ident $($tail:tt)*) => {
        $ts.extend([TokenTree::from(Ident::new(
            &stringify!($i),
            Span::mixed_site(),
        ))]);
        quote_inner!($ts $($tail)*);
    };
    // Literal
    // `0` needs its own rule: it is matched before the generic `$l:literal`
    // arm (which would stringify it) and emitted as an unsuffixed usize.
    ($ts:ident 0 $($tail:tt)*) => {
        $ts.extend([TokenTree::from(Literal::usize_unsuffixed(0))]);
        quote_inner!($ts $($tail)*);
    };
    ($ts:ident $l:literal $($tail:tt)*) => {
        $ts.extend([TokenTree::from(Literal::string(&$l))]);
        quote_inner!($ts $($tail)*);
    };
    // Lifetime
    ($ts:ident $l:lifetime $($tail:tt)*) => {
        $ts.extend([
            TokenTree::from(
                Punct::new('\'', Spacing::Joint)
            ),
            TokenTree::from(Ident::new(
                stringify!($l).trim_start_matches(|c| c == '\''),
                Span::mixed_site(),
            )),
        ]);
        quote_inner!($ts $($tail)*);
    };
    // Attribute
    ($ts:ident #[$($inner:tt)*] $($tail:tt)*) => {
        $ts.extend([
            TokenTree::from(
                Punct::new('#', Spacing::Alone)
            ),
            TokenTree::Group(Group::new(
                Delimiter::Bracket,
                quote_!($($inner)*)
            )),
        ]);
        quote_inner!($ts $($tail)*);
    };
    // Groups
    ($ts:ident ($($inner:tt)*) $($tail:tt)*) => {
        $ts.extend([TokenTree::Group(Group::new(
            Delimiter::Parenthesis,
            quote_!($($inner)*)
        ))]);
        quote_inner!($ts $($tail)*);
    };
    ($ts:ident [$($inner:tt)*] $($tail:tt)*) => {
        $ts.extend([TokenTree::Group(Group::new(
            Delimiter::Bracket,
            quote_!($($inner)*)
        ))]);
        quote_inner!($ts $($tail)*);
    };
    ($ts:ident {$($inner:tt)*} $($tail:tt)*) => {
        $ts.extend([TokenTree::Group(Group::new(
            Delimiter::Brace,
            quote_!($($inner)*)
        ))]);
        quote_inner!($ts $($tail)*);
    };
    // Interpolated values
    // TokenTree by default
    ($ts:ident #($e:expr) $($tail:tt)*) => {
        $ts.extend([$crate::to_tokens::ToTokenTree::into_token_tree($e)]);
        quote_inner!($ts $($tail)*);
    };
    // Allow a TokenStream by request. It's more expensive, so avoid if possible.
    ($ts:ident #S($e:expr) $($tail:tt)*) => {
        $crate::to_tokens::ToTokenStream::append_to($e, $ts);
        quote_inner!($ts $($tail)*);
    };
}

View File

@@ -0,0 +1,185 @@
use proc_macro::{Ident, TokenStream, TokenTree};
/// Assemble the module emitted by `serde_format_description!`.
///
/// The generated module contains `serialize`/`deserialize` functions for
/// `ty` (and an `option` submodule for `Option<ty>`), each gated on the
/// `formatting`/`parsing` features active when this proc-macro crate was
/// compiled. `format` is the token stream evaluating to the format
/// description; `format_description_display` is its human-readable form,
/// used in deserialization `expecting` messages.
pub(crate) fn build(
    visibility: TokenStream,
    mod_name: Ident,
    ty: TokenTree,
    format: TokenStream,
    format_description_display: String,
) -> TokenStream {
    let ty_s = &*ty.to_string();
    // serde visitors, needed only when parsing (deserialization) is enabled.
    let visitor = if cfg!(feature = "parsing") {
        quote_! {
            pub(super) struct Visitor;
            pub(super) struct OptionVisitor;
            impl<'a> ::serde::de::Visitor<'a> for Visitor {
                type Value = __TimeSerdeType;
                fn expecting(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
                    write!(
                        f,
                        concat!(
                            "a(n) `",
                            #(ty_s),
                            "` in the format \"{}\"",
                        ),
                        #(format_description_display.as_str())
                    )
                }
                fn visit_str<E: ::serde::de::Error>(
                    self,
                    value: &str
                ) -> Result<__TimeSerdeType, E> {
                    __TimeSerdeType::parse(value, &description()).map_err(E::custom)
                }
            }
            impl<'a> ::serde::de::Visitor<'a> for OptionVisitor {
                type Value = Option<__TimeSerdeType>;
                fn expecting(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
                    write!(
                        f,
                        concat!(
                            "an `Option<",
                            #(ty_s),
                            ">` in the format \"{}\"",
                        ),
                        #(format_description_display.as_str())
                    )
                }
                fn visit_some<D: ::serde::de::Deserializer<'a>>(
                    self,
                    deserializer: D
                ) -> Result<Option<__TimeSerdeType>, D::Error> {
                    deserializer
                        .deserialize_str(Visitor)
                        .map(Some)
                }
                fn visit_none<E: ::serde::de::Error>(
                    self
                ) -> Result<Option<__TimeSerdeType>, E> {
                    Ok(None)
                }
            }
        }
    } else {
        quote_!()
    };
    // `serialize` for the bare type (formatting feature).
    let serialize_primary = if cfg!(feature = "formatting") {
        quote_! {
            pub fn serialize<S: ::serde::Serializer>(
                datetime: &__TimeSerdeType,
                serializer: S,
            ) -> Result<S::Ok, S::Error> {
                use ::serde::Serialize;
                datetime
                    .format(&description())
                    .map_err(::time::error::Format::into_invalid_serde_value::<S>)?
                    .serialize(serializer)
            }
        }
    } else {
        quote_!()
    };
    // `deserialize` for the bare type (parsing feature).
    let deserialize_primary = if cfg!(feature = "parsing") {
        quote_! {
            pub fn deserialize<'a, D: ::serde::Deserializer<'a>>(
                deserializer: D
            ) -> Result<__TimeSerdeType, D::Error> {
                use ::serde::Deserialize;
                deserializer.deserialize_str(Visitor)
            }
        }
    } else {
        quote_!()
    };
    // `serialize` for `Option<ty>` (formatting feature).
    let serialize_option = if cfg!(feature = "formatting") {
        quote_! {
            #[expect(clippy::ref_option)]
            pub fn serialize<S: ::serde::Serializer>(
                option: &Option<__TimeSerdeType>,
                serializer: S,
            ) -> Result<S::Ok, S::Error> {
                use ::serde::Serialize;
                option.map(|datetime| datetime.format(&description()))
                    .transpose()
                    .map_err(::time::error::Format::into_invalid_serde_value::<S>)?
                    .serialize(serializer)
            }
        }
    } else {
        quote_!()
    };
    // `deserialize` for `Option<ty>` (parsing feature).
    let deserialize_option = if cfg!(feature = "parsing") {
        quote_! {
            pub fn deserialize<'a, D: ::serde::Deserializer<'a>>(
                deserializer: D
            ) -> Result<Option<__TimeSerdeType>, D::Error> {
                use ::serde::Deserialize;
                deserializer.deserialize_option(OptionVisitor)
            }
        }
    } else {
        quote_!()
    };
    let deserialize_option_imports = if cfg!(feature = "parsing") {
        quote_! {
            use super::__hygiene::{OptionVisitor, Visitor};
        }
    } else {
        quote_!()
    };
    // Trait bound for the `description()` return type, depending on which
    // features are enabled. Both disabled is a bug: the macro itself is
    // feature-gated on at least one of them.
    let fd_traits = match (cfg!(feature = "formatting"), cfg!(feature = "parsing")) {
        (false, false) => {
            bug!("serde_format_description::build called without formatting or parsing enabled")
        }
        (false, true) => quote_! { ::time::parsing::Parsable },
        (true, false) => quote_! { ::time::formatting::Formattable },
        (true, true) => quote_! { ::time::formatting::Formattable + ::time::parsing::Parsable },
    };
    quote_! {
        #S(visibility) mod #(mod_name) {
            use super::*;
            // TODO Remove the prefix, forcing the user to import the type themself. This must be
            // done in a breaking change.
            use ::time::#(ty) as __TimeSerdeType;
            #[expect(clippy::pub_use)]
            pub use self::__hygiene::*;
            const fn description() -> impl #S(fd_traits) {
                #S(format)
            }
            mod __hygiene {
                use super::{description, __TimeSerdeType};
                #S(visitor)
                #S(serialize_primary)
                #S(deserialize_primary)
            }
            // While technically public, this is effectively the same visibility as the enclosing
            // module, which has its visibility controlled by the user.
            pub mod option {
                use super::{description, __TimeSerdeType};
                #S(deserialize_option_imports)
                #S(serialize_option)
                #S(deserialize_option)
            }
        }
    }
}

120
vendor/time-macros/src/time.rs vendored Normal file
View File

@@ -0,0 +1,120 @@
use std::iter::Peekable;
use proc_macro::{Span, TokenStream, token_stream};
use time_core::convert::*;
use crate::Error;
use crate::helpers::{consume_any_ident, consume_number, consume_punct};
use crate::to_tokens::ToTokenStream;
/// The clock period attached to a parsed hour.
enum Period {
    /// Ante meridiem — the hour is on a 12-hour clock.
    Am,
    /// Post meridiem — the hour is on a 12-hour clock.
    Pm,
    /// No period was present: the hour is already on a 24-hour clock.
    _24,
}
/// The components of a parsed `time!` literal, validated and normalized to a
/// 24-hour clock by [`parse`].
pub(crate) struct Time {
    /// Hour of the day, in `0..24`.
    pub(crate) hour: u8,
    /// Minute of the hour, in `0..60`.
    pub(crate) minute: u8,
    /// Whole second of the minute, in `0..60`.
    pub(crate) second: u8,
    /// Sub-second portion, in nanoseconds.
    pub(crate) nanosecond: u32,
}
pub(crate) fn parse(chars: &mut Peekable<token_stream::IntoIter>) -> Result<Time, Error> {
fn consume_period(chars: &mut Peekable<token_stream::IntoIter>) -> (Option<Span>, Period) {
if let Ok(span) = consume_any_ident(&["am", "AM"], chars) {
(Some(span), Period::Am)
} else if let Ok(span) = consume_any_ident(&["pm", "PM"], chars) {
(Some(span), Period::Pm)
} else {
(None, Period::_24)
}
}
let (hour_span, hour) = consume_number("hour", chars)?;
let ((minute_span, minute), (second_span, second), (period_span, period)) =
match consume_period(chars) {
// Nothing but the 12-hour clock hour and AM/PM
(period_span @ Some(_), period) => (
(Span::mixed_site(), 0),
(Span::mixed_site(), 0.),
(period_span, period),
),
(None, _) => {
consume_punct(':', chars)?;
let (minute_span, minute) = consume_number::<u8>("minute", chars)?;
let (second_span, second): (_, f64) = if consume_punct(':', chars).is_ok() {
consume_number("second", chars)?
} else {
(Span::mixed_site(), 0.)
};
let (period_span, period) = consume_period(chars);
(
(minute_span, minute),
(second_span, second),
(period_span, period),
)
}
};
let hour = match (hour, period) {
(0, Period::Am | Period::Pm) => {
return Err(Error::InvalidComponent {
name: "hour",
value: hour.to_string(),
span_start: Some(hour_span.start()),
span_end: Some(period_span.unwrap_or_else(|| hour_span.end())),
});
}
(12, Period::Am) => 0,
(12, Period::Pm) => 12,
(hour, Period::Am | Period::_24) => hour,
(hour, Period::Pm) => hour + 12,
};
if hour >= Hour::per_t(Day) {
Err(Error::InvalidComponent {
name: "hour",
value: hour.to_string(),
span_start: Some(hour_span.start()),
span_end: Some(period_span.unwrap_or_else(|| hour_span.end())),
})
} else if minute >= Minute::per_t(Hour) {
Err(Error::InvalidComponent {
name: "minute",
value: minute.to_string(),
span_start: Some(minute_span.start()),
span_end: Some(minute_span.end()),
})
} else if second >= Second::per_t(Minute) {
Err(Error::InvalidComponent {
name: "second",
value: second.to_string(),
span_start: Some(second_span.start()),
span_end: Some(second_span.end()),
})
} else {
Ok(Time {
hour,
minute,
second: second.trunc() as u8,
nanosecond: (second.fract() * Nanosecond::per_t::<f64>(Second)).round() as u32,
})
}
}
impl ToTokenStream for Time {
    fn append_to(self, ts: &mut TokenStream) {
        // Emit a call to time's hidden unchecked constructor. The emitted
        // `unsafe` block relies on `parse` having already range-checked the
        // components at macro-expansion time.
        quote_append! { ts
            unsafe {
                ::time::Time::__from_hms_nanos_unchecked(
                    #(self.hour),
                    #(self.minute),
                    #(self.second),
                    #(self.nanosecond),
                )
            }
        }
    }
}

84
vendor/time-macros/src/to_tokens.rs vendored Normal file
View File

@@ -0,0 +1,84 @@
use std::num::NonZero;
use proc_macro::{Group, Ident, Literal, Punct, Span, TokenStream, TokenTree};
/// Turn a type into a [`TokenStream`].
pub(crate) trait ToTokenStream: Sized {
    /// Append `self`, rendered as tokens, to the given stream.
    fn append_to(self, ts: &mut TokenStream);

    /// Render `self` into a fresh, standalone [`TokenStream`].
    fn into_token_stream(self) -> TokenStream {
        let mut ts = TokenStream::new();
        self.append_to(&mut ts);
        ts
    }
}
/// Turn a type into a single [`TokenTree`].
///
/// Implementors get [`ToTokenStream`] for free via the blanket impl.
pub(crate) trait ToTokenTree: Sized {
    fn into_token_tree(self) -> TokenTree;
}
/// Blanket impl: anything that renders to one token tree can be appended to a
/// token stream.
impl<T: ToTokenTree> ToTokenStream for T {
    fn append_to(self, ts: &mut TokenStream) {
        let tree = self.into_token_tree();
        ts.extend(std::iter::once(tree));
    }
}
/// Render a `bool` as the bare ident `true`/`false` (not a literal token).
impl ToTokenTree for bool {
    fn into_token_tree(self) -> TokenTree {
        let ident = Ident::new(if self { "true" } else { "false" }, Span::mixed_site());
        TokenTree::Ident(ident)
    }
}
/// A `TokenStream` appends itself verbatim.
impl ToTokenStream for TokenStream {
    fn append_to(self, ts: &mut TokenStream) {
        ts.extend(self)
    }
}
/// A `TokenTree` is already a token tree; identity conversion.
impl ToTokenTree for TokenTree {
    fn into_token_tree(self) -> TokenTree {
        self
    }
}
/// Render a `&str` as a string literal token.
impl ToTokenTree for &str {
    fn into_token_tree(self) -> TokenTree {
        Literal::string(self).into()
    }
}
impl ToTokenTree for NonZero<u16> {
    fn into_token_tree(self) -> TokenTree {
        // Emit `unsafe { NonZero::new_unchecked(..) }` so the generated
        // constant skips the runtime zero check; this is sound because
        // `self.get()` here is guaranteed nonzero by the type.
        quote_group! {{
            unsafe { ::core::num::NonZero::<u16>::new_unchecked(#(self.get())) }
        }}
    }
}
/// Implement [`ToTokenTree`] for token types that already convert to
/// `TokenTree` via `From`.
macro_rules! impl_for_tree_types {
    ($($type:ty)*) => {$(
        impl ToTokenTree for $type {
            fn into_token_tree(self) -> TokenTree {
                TokenTree::from(self)
            }
        }
    )*};
}
impl_for_tree_types![Ident Literal Group Punct];
/// Implement [`ToTokenTree`] for integer types, emitting unsuffixed literals
/// so the generated code's type is inferred at the use site.
macro_rules! impl_for_int {
    ($($type:ty => $method:ident)*) => {$(
        impl ToTokenTree for $type {
            fn into_token_tree(self) -> TokenTree {
                TokenTree::from(Literal::$method(self))
            }
        }
    )*};
}
impl_for_int! {
    i8 => i8_unsuffixed
    u8 => u8_unsuffixed
    u16 => u16_unsuffixed
    i32 => i32_unsuffixed
    u32 => u32_unsuffixed
}

38
vendor/time-macros/src/utc_datetime.rs vendored Normal file
View File

@@ -0,0 +1,38 @@
use std::iter::Peekable;
use proc_macro::{TokenStream, token_stream};
use crate::date::Date;
use crate::error::Error;
use crate::time::Time;
use crate::to_tokens::ToTokenStream;
use crate::{date, time};
/// The parsed components of a `utc_datetime!` invocation.
pub(crate) struct UtcDateTime {
    /// Calendar date portion.
    date: Date,
    /// Time-of-day portion.
    time: Time,
}
pub(crate) fn parse(chars: &mut Peekable<token_stream::IntoIter>) -> Result<UtcDateTime, Error> {
let date = date::parse(chars)?;
let time = time::parse(chars)?;
if let Some(token) = chars.peek() {
return Err(Error::UnexpectedToken {
tree: token.clone(),
});
}
Ok(UtcDateTime { date, time })
}
impl ToTokenStream for UtcDateTime {
    fn append_to(self, ts: &mut TokenStream) {
        // Emit `::time::UtcDateTime::new(<date>, <time>)`; the date and time
        // components render themselves via their own `ToTokenStream` impls.
        quote_append! { ts
            ::time::UtcDateTime::new(
                #S(self.date),
                #S(self.time),
            )
        }
    }
}