feat(parser): add action parameters, repeater decorator, and named participants

Add syntax enhancements for more expressive behavior trees and relationships.

Action parameters:
- Support typed/positional parameters: WaitDuration(2.0)
- Support named parameters: SetValue(field: value)
- Enable inline values without field names

Repeater decorator:
- Add * { node } syntax for repeating behavior nodes
- Maps to Decorator("repeat", node)

Named participant blocks:
- Replace self/other blocks with named participant syntax
- Support multi-party relationships
- Example: Alice { role: seeker } instead of self { role: seeker }

Schedule block syntax:
- Require braces for schedule blocks to support narrative fields
- Update tests to use new syntax: 04:00 -> 06:00: Activity { }
This commit is contained in:
2026-02-08 15:45:56 +00:00
parent 4c89c80748
commit a8882eb3ec
12 changed files with 5906 additions and 3659 deletions

View File

@@ -35,6 +35,8 @@ pub enum Token {
State,
#[token("on")]
On,
#[token("enter")]
Enter,
#[token("as")]
As,
#[token("self")]
@@ -180,6 +182,7 @@ pub struct Lexer<'a> {
position: usize,
state: LexerState,
normal_lexer: Option<logos::Lexer<'a, Token>>,
lexer_base_offset: usize, // Offset of the substring that normal_lexer is lexing
}
impl<'a> Lexer<'a> {
@@ -189,6 +192,7 @@ impl<'a> Lexer<'a> {
position: 0,
state: LexerState::Normal,
normal_lexer: Some(Token::lexer(source)),
lexer_base_offset: 0,
}
}
@@ -257,6 +261,7 @@ impl<'a> Lexer<'a> {
let start = content_start.saturating_sub(tag.len() + 4); // Include opening ---tag
self.position = content_end + 3; // Skip closing ---
self.state = LexerState::Normal;
self.lexer_base_offset = self.position; // Update base offset for new substring
self.normal_lexer = Some(Token::lexer(&self.source[self.position..]));
let prose_block = super::ast::ProseBlock {
@@ -295,19 +300,25 @@ impl<'a> Iterator for Lexer<'a> {
match token {
| Ok(Token::ProseMarker) => {
// Switch to prose mode
let marker_pos = span.start;
self.position = marker_pos;
// span is relative to the substring that logos is lexing; add base offset
self.position = self.lexer_base_offset + span.start;
self.state = LexerState::ProseTag;
self.normal_lexer = None;
self.scan_prose_tag()
},
| Ok(tok) => {
self.position = span.end;
Some((span.start, tok, span.end))
// Adjust span to be relative to original source
let absolute_start = self.lexer_base_offset + span.start;
let absolute_end = self.lexer_base_offset + span.end;
self.position = absolute_end;
Some((absolute_start, tok, absolute_end))
},
| Err(_) => {
self.position = span.end;
Some((span.start, Token::Error, span.end))
// Adjust span to be relative to original source
let absolute_start = self.lexer_base_offset + span.start;
let absolute_end = self.lexer_base_offset + span.end;
self.position = absolute_end;
Some((absolute_start, Token::Error, absolute_end))
},
}
},
@@ -393,6 +404,38 @@ The bakery had a no-nonsense policy.
}
}
#[test]
fn test_multiple_prose_blocks() {
    // Two consecutive prose blocks in one source: the lexer must emit one
    // ProseBlock token per block, with the correct tag and body for each.
    let source = r#"
---description
First prose block content.
---
---details
Second prose block content.
---
"#;
    let tokens: Vec<Token> = Lexer::new(source).map(|(_, tok, _)| tok).collect();
    assert_eq!(tokens.len(), 2, "Should have exactly 2 prose block tokens");

    // First block: tagged "description", carries the first body.
    if let Token::ProseBlock(pb) = &tokens[0] {
        assert_eq!(pb.tag, "description");
        assert!(pb.content.contains("First prose block"));
    } else {
        panic!("Expected first ProseBlock, got {:?}", tokens[0]);
    }

    // Second block: tagged "details", carries the second body. This is the
    // case that exercises re-entering normal lexing between prose blocks.
    if let Token::ProseBlock(pb) = &tokens[1] {
        assert_eq!(pb.tag, "details");
        assert!(pb.content.contains("Second prose block"));
    } else {
        panic!("Expected second ProseBlock, got {:?}", tokens[1]);
    }
}
#[test]
fn test_time_duration_literals() {
let input = "08:30 14:45:00 2h30m 45m";