Files
storybook/src/syntax/prop_tests.rs
Sienna Meridian Satterwhite 9c20dd4092 feat: implement storybook DSL with template composition and validation
Add complete domain-specific language for authoring narrative content for
agent simulations.

Features:
- Complete parser using LALRPOP + logos lexer
- Template composition (includes + multiple inheritance)
- Strict mode validation for templates
- Reserved keyword protection
- Semantic validators (trait ranges, schedule overlaps, life arcs, behaviors)
- Name resolution and cross-reference tracking
- CLI tool (validate, inspect, query commands)
- Query API with filtering
- 260 comprehensive tests (unit, integration, property-based)

Implementation phases:
- Phase 1 (Parser): Complete
- Phase 2 (Resolution + Validation): Complete
- Phase 3 (Public API + CLI): Complete

BREAKING CHANGE: Initial implementation
2026-02-08 13:24:35 +00:00

1442 lines
51 KiB
Rust

use proptest::{
prelude::*,
strategy::BoxedStrategy,
};
use crate::syntax::{
lexer::{
Lexer,
Token,
},
FileParser,
};
// ===== Generators for valid syntax elements =====
fn valid_ident() -> impl Strategy<Value = String> {
    // Every reserved word of the DSL; generated identifiers must avoid
    // these or the lexer would emit keyword tokens instead of Ident.
    const KEYWORDS: &[&str] = &[
        "use", "character", "template", "life_arc", "schedule", "behavior",
        "institution", "relationship", "location", "species", "enum", "state",
        "on", "as", "self", "other", "remove", "append", "forall", "exists",
        "in", "where", "and", "or", "not", "is", "true", "false",
    ];
    // Identifier-shaped strings (letter/underscore head, up to 20 tail
    // chars), filtered so no reserved word slips through.
    "[a-zA-Z_][a-zA-Z0-9_]{0,20}"
        .prop_filter("not a keyword", |s| !KEYWORDS.contains(&s.as_str()))
}
fn valid_string() -> impl Strategy<Value = String> {
    // Strings without quotes or backslashes for simplicity
    // (callers embed this value inside a quoted literal themselves, so
    // excluding `"` and `\` means no escaping logic is needed anywhere).
    "[a-zA-Z0-9 ,.!?-]{0,50}"
}
fn valid_int() -> impl Strategy<Value = i64> {
    // Small signed range keeps rendered literals short and exercises
    // negative numbers without any overflow concerns downstream.
    -1000i64..1000i64
}
/// Strategy for finite floats in a small symmetric range.
///
/// A bounded `f64` range strategy only produces values inside
/// `[-1000.0, 1000.0)`, all of which are finite, so the previous
/// `prop_filter("finite", |f| f.is_finite())` could never reject a value
/// and has been removed as dead code.
fn valid_float() -> impl Strategy<Value = f64> {
    -1000.0..1000.0
}
/// Strategy for a wall-clock time as an (hour, minute, second) triple.
fn valid_time() -> impl Strategy<Value = (u8, u8, u8)> {
    let hour = 0u8..24;
    let minute = 0u8..60;
    let second = 0u8..60;
    (hour, minute, second)
}
/// Strategy for a duration as an (hours, minutes, seconds) triple.
/// Same component bounds as `valid_time` but as `u32`, matching the
/// duration-literal rendering code.
fn valid_duration() -> impl Strategy<Value = (u32, u32, u32)> {
    let hours = 0u32..24;
    let minutes = 0u32..60;
    let seconds = 0u32..60;
    (hours, minutes, seconds)
}
// ===== Lexer property tests =====
proptest! {
    // Fuzz: lexing arbitrary printable input must never panic
    // (errors are acceptable, panics are not).
    #[test]
    fn test_lexer_doesnt_panic(s in "\\PC{0,100}") {
        // Any string should not panic the lexer
        let lexer = Lexer::new(&s);
        let _tokens: Vec<_> = lexer.collect();
    }
    // A generated (non-keyword) identifier lexes to exactly one Ident
    // token whose spelling round-trips.
    #[test]
    fn test_valid_ident_tokenizes(name in valid_ident()) {
        let lexer = Lexer::new(&name);
        let tokens: Vec<Token> = lexer.map(|(_, tok, _)| tok).collect();
        assert_eq!(tokens.len(), 1);
        match &tokens[0] {
            Token::Ident(s) => assert_eq!(s, &name),
            _ => panic!("Expected Ident token, got {:?}", tokens[0]),
        }
    }
    // An integer literal (possibly negative) lexes to one IntLit carrying
    // the same value.
    #[test]
    fn test_valid_int_tokenizes(n in valid_int()) {
        let input = n.to_string();
        let lexer = Lexer::new(&input);
        let tokens: Vec<Token> = lexer.map(|(_, tok, _)| tok).collect();
        assert_eq!(tokens.len(), 1);
        match tokens[0] {
            Token::IntLit(val) => assert_eq!(val, n),
            _ => panic!("Expected IntLit token"),
        }
    }
    // A float rendered with two decimals lexes to one FloatLit; the value
    // itself is not compared because formatting may round it.
    #[test]
    fn test_valid_float_tokenizes(n in valid_float()) {
        let input = format!("{:.2}", n);
        let lexer = Lexer::new(&input);
        let tokens: Vec<Token> = lexer.map(|(_, tok, _)| tok).collect();
        assert_eq!(tokens.len(), 1);
        match tokens[0] {
            Token::FloatLit(_) => {},
            _ => panic!("Expected FloatLit token"),
        }
    }
    // A quoted, escape-free string lexes to one StringLit holding exactly
    // the unquoted contents.
    #[test]
    fn test_valid_string_tokenizes(s in valid_string()) {
        let input = format!("\"{}\"", s);
        let lexer = Lexer::new(&input);
        let tokens: Vec<Token> = lexer.map(|(_, tok, _)| tok).collect();
        assert_eq!(tokens.len(), 1);
        match &tokens[0] {
            Token::StringLit(val) => assert_eq!(val, &s),
            _ => panic!("Expected StringLit token"),
        }
    }
    // Zero-padded HH:MM:SS lexes to a single TimeLit.
    #[test]
    fn test_time_literal_tokenizes(time in valid_time()) {
        let (h, m, s) = time;
        let input = format!("{:02}:{:02}:{:02}", h, m, s);
        let lexer = Lexer::new(&input);
        let tokens: Vec<Token> = lexer.map(|(_, tok, _)| tok).collect();
        assert_eq!(tokens.len(), 1);
        match &tokens[0] {
            Token::TimeLit(_) => {},
            _ => panic!("Expected TimeLit token"),
        }
    }
    // Durations are rendered from the most significant nonzero component
    // down. Note the rendering intentionally drops lower components in
    // some branches (e.g. h>0, m==0, s>0 renders as just "{h}h").
    // All-zero input renders "0s", which the final guard excludes from
    // the assertion.
    #[test]
    fn test_duration_literal_tokenizes(dur in valid_duration()) {
        let (h, m, s) = dur;
        let input = if h > 0 && m > 0 && s > 0 {
            format!("{}h{}m{}s", h, m, s)
        } else if h > 0 && m > 0 {
            format!("{}h{}m", h, m)
        } else if h > 0 {
            format!("{}h", h)
        } else if m > 0 {
            format!("{}m", m)
        } else {
            format!("{}s", s)
        };
        let lexer = Lexer::new(&input);
        let tokens: Vec<Token> = lexer.map(|(_, tok, _)| tok).collect();
        if !input.is_empty() && input != "0h" && input != "0m" && input != "0s" {
            assert!(!tokens.is_empty(), "Duration '{}' should tokenize", input);
        }
    }
    // Reserved words must lex to dedicated keyword tokens, never Ident.
    #[test]
    fn test_keywords_are_distinct_from_idents(
        keyword in prop::sample::select(vec![
            "character", "template", "enum", "use", "self", "other",
            "and", "or", "not", "is", "true", "false"
        ])
    ) {
        let lexer = Lexer::new(keyword);
        let tokens: Vec<Token> = lexer.map(|(_, tok, _)| tok).collect();
        assert_eq!(tokens.len(), 1);
        // Should be a keyword token, not Ident
        if let Token::Ident(_) = &tokens[0] { panic!("'{}' should be a keyword, not an Ident", keyword) }
    }
    // Two identifiers separated by any run of whitespace lex to exactly
    // two Ident tokens, in order.
    #[test]
    fn test_whitespace_separates_tokens(
        name1 in valid_ident(),
        name2 in valid_ident(),
        ws in "[ \t\n]{1,5}"
    ) {
        let input = format!("{}{}{}", name1, ws, name2);
        let lexer = Lexer::new(&input);
        let tokens: Vec<Token> = lexer.map(|(_, tok, _)| tok).collect();
        assert_eq!(tokens.len(), 2);
        match (&tokens[0], &tokens[1]) {
            (Token::Ident(s1), Token::Ident(s2)) => {
                assert_eq!(s1, &name1);
                assert_eq!(s2, &name2);
            }
            _ => panic!("Expected two Ident tokens"),
        }
    }
}
// ===== Parser property tests =====
/// Strategy for a `key: value` field pair: identifier key, integer value
/// already rendered as text.
fn valid_field() -> impl Strategy<Value = (String, String)> {
    let key = valid_ident();
    let value = valid_int().prop_map(|n| n.to_string());
    (key, value)
}
/// Strategy for a complete `character <name> { ... }` declaration with
/// zero to four integer-valued fields.
fn valid_character() -> impl Strategy<Value = String> {
    let fields = prop::collection::vec(valid_field(), 0..5);
    (valid_ident(), fields).prop_map(|(name, fields)| {
        let mut lines = Vec::with_capacity(fields.len());
        for (k, v) in &fields {
            lines.push(format!(" {}: {}", k, v));
        }
        format!("character {} {{\n{}\n}}", name, lines.join("\n"))
    })
}
/// Strategy for a `template <name> { ... }` declaration whose fields are
/// integer ranges; each endpoint pair is ordered so the rendered range is
/// always `low..high`.
fn valid_template() -> impl Strategy<Value = String> {
    let range_field = (valid_ident(), valid_int(), valid_int()).prop_map(|(name, a, b)| {
        let (min, max) = if a < b { (a, b) } else { (b, a) };
        (name, format!("{}..{}", min, max))
    });
    let fields = prop::collection::vec(range_field, 0..5);
    (valid_ident(), fields).prop_map(|(name, fields)| {
        let body: Vec<String> = fields
            .iter()
            .map(|(k, v)| format!(" {}: {}", k, v))
            .collect();
        format!("template {} {{\n{}\n}}", name, body.join("\n"))
    })
}
/// Strategy for an `enum <name> { A, B, ... }` declaration with at least
/// one variant.
fn valid_enum() -> impl Strategy<Value = String> {
    let variants = prop::collection::vec(valid_ident(), 1..10);
    (valid_ident(), variants).prop_map(|(name, variants)| {
        let list = variants.join(", ");
        format!("enum {} {{ {} }}", name, list)
    })
}
/// Strategy for a `schedule <name> { ... }` declaration. Consecutive
/// sampled times become `start -> end: activity` blocks; a single sampled
/// time produces an empty body because `windows(2)` yields nothing.
fn valid_schedule() -> impl Strategy<Value = String> {
    let times = prop::collection::vec(valid_time(), 1..5);
    (valid_ident(), times).prop_map(|(name, times)| {
        let mut blocks = Vec::new();
        for pair in times.windows(2) {
            let (h1, m1, s1) = pair[0];
            let (h2, m2, _) = pair[1];
            blocks.push(format!(
                " {:02}:{:02}:{:02} -> {:02}:{:02}:00: activity",
                h1, m1, s1, h2, m2
            ));
        }
        format!("schedule {} {{\n{}\n}}", name, blocks.join("\n"))
    })
}
/// Strategy for a `location <name> { ... }` declaration with zero to four
/// integer-valued fields.
fn valid_location() -> impl Strategy<Value = String> {
    let fields = prop::collection::vec(valid_field(), 0..5);
    (valid_ident(), fields).prop_map(|(name, fields)| {
        let mut body = String::new();
        for (i, (k, v)) in fields.iter().enumerate() {
            if i > 0 {
                body.push('\n');
            }
            body.push_str(&format!(" {}: {}", k, v));
        }
        format!("location {} {{\n{}\n}}", name, body)
    })
}
/// Strategy for a `species <name> { ... }` declaration with zero to four
/// integer-valued fields.
fn valid_species() -> impl Strategy<Value = String> {
    let fields = prop::collection::vec(valid_field(), 0..5);
    (valid_ident(), fields).prop_map(|(name, fields)| {
        let rendered: Vec<String> =
            fields.iter().map(|(k, v)| format!(" {}: {}", k, v)).collect();
        format!("species {} {{\n{}\n}}", name, rendered.join("\n"))
    })
}
/// Strategy for an `institution <name> { ... }` declaration with zero to
/// four integer-valued fields.
fn valid_institution() -> impl Strategy<Value = String> {
    let fields = prop::collection::vec(valid_field(), 0..5);
    (valid_ident(), fields).prop_map(|(name, fields)| {
        let mut lines = Vec::with_capacity(fields.len());
        for (k, v) in &fields {
            lines.push(format!(" {}: {}", k, v));
        }
        format!("institution {} {{\n{}\n}}", name, lines.join("\n"))
    })
}
/// Strategy for a `relationship <name> { ... }` declaration. The two
/// leading identifiers inside the body name the related parties, followed
/// by zero to two fields.
fn valid_relationship() -> impl Strategy<Value = String> {
    let fields = prop::collection::vec(valid_field(), 0..3);
    (valid_ident(), valid_ident(), valid_ident(), fields).prop_map(
        |(name, person1, person2, fields)| {
            let mut body = Vec::with_capacity(fields.len());
            for (k, v) in &fields {
                body.push(format!(" {}: {}", k, v));
            }
            format!(
                "relationship {} {{\n {}\n {}\n{}\n}}",
                name,
                person1,
                person2,
                body.join("\n")
            )
        },
    )
}
proptest! {
    // Fuzz: parsing arbitrary printable input must never panic
    // (parse errors are fine, panics are not).
    #[test]
    fn test_parser_doesnt_panic(input in "\\PC{0,200}") {
        let lexer = Lexer::new(&input);
        let parser = FileParser::new();
        let _ = parser.parse(lexer);
        // Should not panic
    }
    // Each test below feeds one generated, syntactically valid
    // declaration through lexer + parser and checks that exactly one
    // declaration of the expected AST variant comes out.
    #[test]
    fn test_valid_character_parses(input in valid_character()) {
        let lexer = Lexer::new(&input);
        let parser = FileParser::new();
        let result = parser.parse(lexer);
        assert!(result.is_ok(), "Failed to parse valid character: {}\nError: {:?}", input, result.err());
        if let Ok(file) = result {
            assert_eq!(file.declarations.len(), 1);
            match &file.declarations[0] {
                crate::syntax::ast::Declaration::Character(_) => {},
                _ => panic!("Expected Character declaration"),
            }
        }
    }
    #[test]
    fn test_valid_template_parses(input in valid_template()) {
        let lexer = Lexer::new(&input);
        let parser = FileParser::new();
        let result = parser.parse(lexer);
        assert!(result.is_ok(), "Failed to parse valid template: {}\nError: {:?}", input, result.err());
        if let Ok(file) = result {
            assert_eq!(file.declarations.len(), 1);
            match &file.declarations[0] {
                crate::syntax::ast::Declaration::Template(_) => {},
                _ => panic!("Expected Template declaration"),
            }
        }
    }
    #[test]
    fn test_valid_enum_parses(input in valid_enum()) {
        let lexer = Lexer::new(&input);
        let parser = FileParser::new();
        let result = parser.parse(lexer);
        assert!(result.is_ok(), "Failed to parse valid enum: {}\nError: {:?}", input, result.err());
        if let Ok(file) = result {
            assert_eq!(file.declarations.len(), 1);
            match &file.declarations[0] {
                crate::syntax::ast::Declaration::Enum(_) => {},
                _ => panic!("Expected Enum declaration"),
            }
        }
    }
    #[test]
    fn test_valid_schedule_parses(input in valid_schedule()) {
        let lexer = Lexer::new(&input);
        let parser = FileParser::new();
        let result = parser.parse(lexer);
        assert!(result.is_ok(), "Failed to parse valid schedule: {}\nError: {:?}", input, result.err());
        if let Ok(file) = result {
            assert_eq!(file.declarations.len(), 1);
            match &file.declarations[0] {
                crate::syntax::ast::Declaration::Schedule(_) => {},
                _ => panic!("Expected Schedule declaration"),
            }
        }
    }
    #[test]
    fn test_valid_location_parses(input in valid_location()) {
        let lexer = Lexer::new(&input);
        let parser = FileParser::new();
        let result = parser.parse(lexer);
        assert!(result.is_ok(), "Failed to parse valid location: {}\nError: {:?}", input, result.err());
        if let Ok(file) = result {
            assert_eq!(file.declarations.len(), 1);
            match &file.declarations[0] {
                crate::syntax::ast::Declaration::Location(_) => {},
                _ => panic!("Expected Location declaration"),
            }
        }
    }
    #[test]
    fn test_valid_species_parses(input in valid_species()) {
        let lexer = Lexer::new(&input);
        let parser = FileParser::new();
        let result = parser.parse(lexer);
        assert!(result.is_ok(), "Failed to parse valid species: {}\nError: {:?}", input, result.err());
        if let Ok(file) = result {
            assert_eq!(file.declarations.len(), 1);
            match &file.declarations[0] {
                crate::syntax::ast::Declaration::Species(_) => {},
                _ => panic!("Expected Species declaration"),
            }
        }
    }
    #[test]
    fn test_valid_institution_parses(input in valid_institution()) {
        let lexer = Lexer::new(&input);
        let parser = FileParser::new();
        let result = parser.parse(lexer);
        assert!(result.is_ok(), "Failed to parse valid institution: {}\nError: {:?}", input, result.err());
        if let Ok(file) = result {
            assert_eq!(file.declarations.len(), 1);
            match &file.declarations[0] {
                crate::syntax::ast::Declaration::Institution(_) => {},
                _ => panic!("Expected Institution declaration"),
            }
        }
    }
    #[test]
    fn test_valid_relationship_parses(input in valid_relationship()) {
        let lexer = Lexer::new(&input);
        let parser = FileParser::new();
        let result = parser.parse(lexer);
        assert!(result.is_ok(), "Failed to parse valid relationship: {}\nError: {:?}", input, result.err());
        if let Ok(file) = result {
            assert_eq!(file.declarations.len(), 1);
            match &file.declarations[0] {
                crate::syntax::ast::Declaration::Relationship(_) => {},
                _ => panic!("Expected Relationship declaration"),
            }
        }
    }
    // Mixed top-level declarations: the parser must accept any blank-line
    // separated concatenation and report one AST declaration per input
    // declaration. (Empty concatenations are skipped: nothing to assert.)
    #[test]
    fn test_multiple_declarations_parse(
        chars in prop::collection::vec(valid_character(), 0..3),
        templates in prop::collection::vec(valid_template(), 0..3),
        enums in prop::collection::vec(valid_enum(), 0..3),
    ) {
        let mut all = chars;
        all.extend(templates);
        all.extend(enums);
        let input = all.join("\n\n");
        let lexer = Lexer::new(&input);
        let parser = FileParser::new();
        let result = parser.parse(lexer);
        if !all.is_empty() {
            assert!(result.is_ok(), "Failed to parse multiple declarations:\n{}\nError: {:?}", input, result.err());
            if let Ok(file) = result {
                assert_eq!(file.declarations.len(), all.len());
            }
        }
    }
}
// ===== Life Arc generators =====
/// Strategy for a relational comparison `<ident> <op> <number>` where the
/// right-hand side is an int or a two-decimal float.
fn valid_comparison_expr() -> impl Strategy<Value = String> {
    let op = prop::sample::select(vec![">", ">=", "<", "<="]);
    let rhs = prop_oneof![
        valid_int().prop_map(|n| n.to_string()),
        valid_float().prop_map(|f| format!("{:.2}", f)),
    ];
    (valid_ident(), op, rhs).prop_map(|(lhs, op, rhs)| format!("{} {} {}", lhs, op, rhs))
}
/// Strategy for an equality test `<ident> is <literal>` where the literal
/// may be an int, a two-decimal float, a quoted string, or a boolean.
fn valid_equality_expr() -> impl Strategy<Value = String> {
    let literal = prop_oneof![
        valid_int().prop_map(|n| n.to_string()),
        valid_float().prop_map(|f| format!("{:.2}", f)),
        valid_string().prop_map(|s| format!("\"{}\"", s)),
        Just("true".to_string()),
        Just("false".to_string()),
    ];
    (valid_ident(), literal).prop_map(|(lhs, rhs)| format!("{} is {}", lhs, rhs))
}
/// Strategy for two comparison expressions joined by `and`.
fn valid_logical_and_expr() -> impl Strategy<Value = String> {
    (valid_comparison_expr(), valid_comparison_expr())
        .prop_map(|pair| format!("{} and {}", pair.0, pair.1))
}
/// Strategy for two bare identifiers joined by `or`.
fn valid_logical_or_expr() -> impl Strategy<Value = String> {
    (valid_ident(), valid_ident()).prop_map(|pair| format!("{} or {}", pair.0, pair.1))
}
/// Strategy for the negation of a bare identifier (`not <ident>`).
fn valid_logical_not_expr() -> impl Strategy<Value = String> {
    valid_ident().prop_map(|name| {
        let mut expr = String::from("not ");
        expr.push_str(&name);
        expr
    })
}
/// Strategy for a field access `self.<field>` or `other.<field>`.
fn valid_field_access_expr() -> impl Strategy<Value = String> {
    let receiver = prop::sample::select(vec!["self", "other"]);
    (receiver, valid_ident()).prop_map(|(base, field)| format!("{}.{}", base, field))
}
/// Strategy for a comparison whose left side is a field access:
/// `<self|other>.<field> <op> <int-or-ident>`.
fn valid_field_access_comparison() -> impl Strategy<Value = String> {
    let op = prop::sample::select(vec![">", ">=", "<", "<="]);
    let rhs = prop_oneof![valid_int().prop_map(|n| n.to_string()), valid_ident()];
    (valid_field_access_expr(), op, rhs)
        .prop_map(|(access, op, value)| format!("{} {} {}", access, op, value))
}
/// Strategy covering every expression form the `on <cond> -> <target>`
/// grammar accepts: bare identifier, int literal, boolean literal,
/// comparison, equality, and/or/not combinations, and field accesses with
/// or without a comparison. Branch order is preserved from the original
/// grammar listing so the sampling distribution is unchanged.
fn valid_transition_condition() -> impl Strategy<Value = String> {
    prop_oneof![
        valid_ident(),
        valid_int().prop_map(|n| n.to_string()),
        Just("true".to_string()),
        Just("false".to_string()),
        valid_comparison_expr(),
        valid_equality_expr(),
        valid_logical_and_expr(),
        valid_logical_or_expr(),
        valid_logical_not_expr(),
        valid_field_access_expr(),
        valid_field_access_comparison(),
    ]
}
/// Strategy for a single transition line ` on <condition> -> <target>`
/// (the leading space matches the indentation expected inside a state).
fn valid_transition() -> impl Strategy<Value = String> {
    (valid_transition_condition(), valid_ident())
        .prop_map(|(condition, target)| format!(" on {} -> {}", condition, target))
}
/// Strategy for a `state` block with 0-2 transitions; an empty transition
/// list renders the compact `{}` body form.
fn valid_arc_state() -> impl Strategy<Value = String> {
    let transitions = prop::collection::vec(valid_transition(), 0..3);
    (valid_ident(), transitions).prop_map(|(state_name, transitions)| {
        if transitions.is_empty() {
            format!(" state {} {{}}", state_name)
        } else {
            format!(" state {} {{\n{}\n }}", state_name, transitions.join("\n"))
        }
    })
}
/// Strategy for a `life_arc <name> { ... }` declaration with one to four
/// states.
fn valid_life_arc() -> impl Strategy<Value = String> {
    let states = prop::collection::vec(valid_arc_state(), 1..5);
    (valid_ident(), states)
        .prop_map(|(name, states)| format!("life_arc {} {{\n{}\n}}", name, states.join("\n")))
}
// ===== Behavior Tree generators =====
/// Strategy for an action leaf node in three shapes: a bare name, an
/// empty call `name()`, or a parameterized call `name(k: v, ...)`.
fn valid_action_node() -> impl Strategy<Value = String> {
    let params = prop::option::of(prop::collection::vec(valid_field(), 0..3));
    (valid_ident(), params).prop_map(|(name, params)| match params {
        None => name,
        Some(params) if params.is_empty() => format!("{}()", name),
        Some(params) => {
            let rendered = params
                .iter()
                .map(|(k, v)| format!("{}: {}", k, v))
                .collect::<Vec<_>>()
                .join(", ");
            format!("{}({})", name, rendered)
        }
    })
}
fn valid_behavior_node_depth(depth: u32) -> BoxedStrategy<String> {
if depth == 0 {
// Base case: just actions or subtrees
prop_oneof![
valid_action_node(),
valid_ident().prop_map(|name| format!("@{}", name)),
]
.boxed()
} else {
// Recursive case: can be action, subtree, selector, or sequence
let action = valid_action_node();
let subtree = valid_ident().prop_map(|name| format!("@{}", name));
let selector = prop::collection::vec(valid_behavior_node_depth(depth - 1), 1..3).prop_map(
|children| {
let children_str = children
.iter()
.map(|c| format!(" {}", c))
.collect::<Vec<_>>()
.join("\n");
format!("? {{\n{}\n }}", children_str)
},
);
let sequence = prop::collection::vec(valid_behavior_node_depth(depth - 1), 1..3).prop_map(
|children| {
let children_str = children
.iter()
.map(|c| format!(" {}", c))
.collect::<Vec<_>>()
.join("\n");
format!("> {{\n{}\n }}", children_str)
},
);
prop_oneof![action, subtree, selector, sequence,].boxed()
}
}
/// Strategy for a full `behavior <name> { ... }` declaration wrapping a
/// tree of bounded depth (kept shallow so generation and shrinking stay
/// fast).
fn valid_behavior_tree() -> impl Strategy<Value = String> {
    let root = valid_behavior_node_depth(2);
    (valid_ident(), root)
        .prop_map(|(name, root)| format!("behavior {} {{\n {}\n}}", name, root))
}
proptest! {
#[test]
fn test_valid_life_arc_parses(input in valid_life_arc()) {
let lexer = Lexer::new(&input);
let parser = FileParser::new();
let result = parser.parse(lexer);
assert!(result.is_ok(), "Failed to parse valid life_arc: {}\nError: {:?}", input, result.err());
if let Ok(file) = result {
assert_eq!(file.declarations.len(), 1);
match &file.declarations[0] {
crate::syntax::ast::Declaration::LifeArc(_) => {},
_ => panic!("Expected LifeArc declaration"),
}
}
}
#[test]
fn test_valid_behavior_tree_parses(input in valid_behavior_tree()) {
let lexer = Lexer::new(&input);
let parser = FileParser::new();
let result = parser.parse(lexer);
assert!(result.is_ok(), "Failed to parse valid behavior: {}\nError: {:?}", input, result.err());
if let Ok(file) = result {
assert_eq!(file.declarations.len(), 1);
match &file.declarations[0] {
crate::syntax::ast::Declaration::Behavior(_) => {},
_ => panic!("Expected Behavior declaration"),
}
}
}
// ===== Comprehensive edge case tests =====
#[test]
fn test_life_arc_with_no_transitions(name in valid_ident(), state_name in valid_ident()) {
let input = format!("life_arc {} {{\n state {} {{}}\n}}", name, state_name);
let lexer = Lexer::new(&input);
let parser = FileParser::new();
let result = parser.parse(lexer);
assert!(result.is_ok(), "Failed to parse life arc with no transitions: {:?}", result.err());
}
#[test]
fn test_life_arc_with_multiple_transitions(
name in valid_ident(),
state_name in valid_ident(),
targets in prop::collection::vec(valid_ident(), 2..5)
) {
let transitions = targets.iter()
.map(|target| format!(" on ready -> {}", target))
.collect::<Vec<_>>()
.join("\n");
let input = format!("life_arc {} {{\n state {} {{\n{}\n }}\n}}", name, state_name, transitions);
let lexer = Lexer::new(&input);
let parser = FileParser::new();
let result = parser.parse(lexer);
assert!(result.is_ok(), "Failed to parse life arc with multiple transitions: {:?}", result.err());
}
#[test]
fn test_behavior_tree_deeply_nested(name in valid_ident()) {
let input = format!(
"behavior {} {{\n > {{\n ? {{\n > {{\n action\n }}\n }}\n }}\n}}",
name
);
let lexer = Lexer::new(&input);
let parser = FileParser::new();
let result = parser.parse(lexer);
assert!(result.is_ok(), "Failed to parse deeply nested behavior tree: {:?}", result.err());
}
#[test]
fn test_behavior_tree_with_action_params(
name in valid_ident(),
action in valid_ident(),
params in prop::collection::vec(valid_field(), 1..4)
) {
let params_str = params.iter()
.map(|(k, v)| format!("{}: {}", k, v))
.collect::<Vec<_>>()
.join(", ");
let input = format!("behavior {} {{\n {}({})\n}}", name, action, params_str);
let lexer = Lexer::new(&input);
let parser = FileParser::new();
let result = parser.parse(lexer);
assert!(result.is_ok(), "Failed to parse behavior with action params: {:?}", result.err());
}
#[test]
fn test_behavior_tree_with_subtree_reference(
name in valid_ident(),
subtree_path in prop::collection::vec(valid_ident(), 1..3)
) {
let path = subtree_path.join("::");
let input = format!("behavior {} {{\n @{}\n}}", name, path);
let lexer = Lexer::new(&input);
let parser = FileParser::new();
let result = parser.parse(lexer);
assert!(result.is_ok(), "Failed to parse behavior with subtree: {:?}", result.err());
}
#[test]
fn test_behavior_selector_with_many_children(
name in valid_ident(),
children in prop::collection::vec(valid_ident(), 2..10)
) {
let children_str = children.iter()
.map(|c| format!(" {}", c))
.collect::<Vec<_>>()
.join("\n");
let input = format!("behavior {} {{\n ? {{\n{}\n }}\n}}", name, children_str);
let lexer = Lexer::new(&input);
let parser = FileParser::new();
let result = parser.parse(lexer);
assert!(result.is_ok(), "Failed to parse selector with many children: {:?}", result.err());
}
#[test]
fn test_behavior_sequence_with_many_children(
name in valid_ident(),
children in prop::collection::vec(valid_ident(), 2..10)
) {
let children_str = children.iter()
.map(|c| format!(" {}", c))
.collect::<Vec<_>>()
.join("\n");
let input = format!("behavior {} {{\n > {{\n{}\n }}\n}}", name, children_str);
let lexer = Lexer::new(&input);
let parser = FileParser::new();
let result = parser.parse(lexer);
assert!(result.is_ok(), "Failed to parse sequence with many children: {:?}", result.err());
}
#[test]
fn test_life_arc_transition_with_literal_condition(
name in valid_ident(),
state_name in valid_ident(),
target in valid_ident(),
val in valid_int()
) {
let input = format!("life_arc {} {{\n state {} {{\n on {} -> {}\n }}\n}}", name, state_name, val, target);
let lexer = Lexer::new(&input);
let parser = FileParser::new();
let result = parser.parse(lexer);
assert!(result.is_ok(), "Failed to parse life arc with literal condition: {:?}", result.err());
}
#[test]
fn test_life_arc_transition_with_bool_condition(
name in valid_ident(),
state_name in valid_ident(),
target in valid_ident(),
val in prop::sample::select(vec![true, false])
) {
let input = format!("life_arc {} {{\n state {} {{\n on {} -> {}\n }}\n}}", name, state_name, val, target);
let lexer = Lexer::new(&input);
let parser = FileParser::new();
let result = parser.parse(lexer);
assert!(result.is_ok(), "Failed to parse life arc with bool condition: {:?}", result.err());
}
// ===== Comparison expression tests =====
#[test]
fn test_comparison_all_operators(
ident in valid_ident(),
val in valid_int()
) {
for op in &[">", ">=", "<", "<="] {
let comp = format!("{} {} {}", ident, op, val);
let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", comp);
let lexer = Lexer::new(&input);
let parser = FileParser::new();
let result = parser.parse(lexer);
assert!(result.is_ok(), "Failed to parse comparison '{}': {:?}", comp, result.err());
}
}
#[test]
fn test_comparison_with_int(comp in valid_comparison_expr()) {
let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", comp);
let lexer = Lexer::new(&input);
let parser = FileParser::new();
let result = parser.parse(lexer);
assert!(result.is_ok(), "Failed to parse comparison '{}': {:?}", comp, result.err());
}
#[test]
fn test_comparison_gt(ident in valid_ident(), val in valid_int()) {
let comp = format!("{} > {}", ident, val);
let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", comp);
let lexer = Lexer::new(&input);
let parser = FileParser::new();
let result = parser.parse(lexer);
assert!(result.is_ok(), "Failed to parse > comparison: {:?}", result.err());
}
#[test]
fn test_comparison_gte(ident in valid_ident(), val in valid_int()) {
let comp = format!("{} >= {}", ident, val);
let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", comp);
let lexer = Lexer::new(&input);
let parser = FileParser::new();
let result = parser.parse(lexer);
assert!(result.is_ok(), "Failed to parse >= comparison: {:?}", result.err());
}
#[test]
fn test_comparison_lt(ident in valid_ident(), val in valid_int()) {
let comp = format!("{} < {}", ident, val);
let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", comp);
let lexer = Lexer::new(&input);
let parser = FileParser::new();
let result = parser.parse(lexer);
assert!(result.is_ok(), "Failed to parse < comparison: {:?}", result.err());
}
#[test]
fn test_comparison_lte(ident in valid_ident(), val in valid_int()) {
let comp = format!("{} <= {}", ident, val);
let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", comp);
let lexer = Lexer::new(&input);
let parser = FileParser::new();
let result = parser.parse(lexer);
assert!(result.is_ok(), "Failed to parse <= comparison: {:?}", result.err());
}
#[test]
fn test_comparison_with_float(
ident in valid_ident(),
val in valid_float()
) {
let comp = format!("{} > {:.2}", ident, val);
let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", comp);
let lexer = Lexer::new(&input);
let parser = FileParser::new();
let result = parser.parse(lexer);
assert!(result.is_ok(), "Failed to parse float comparison: {:?}", result.err());
}
#[test]
fn test_comparison_with_negative_int(
ident in valid_ident(),
val in -100i64..0i64
) {
let comp = format!("{} < {}", ident, val);
let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", comp);
let lexer = Lexer::new(&input);
let parser = FileParser::new();
let result = parser.parse(lexer);
assert!(result.is_ok(), "Failed to parse comparison with negative: {:?}", result.err());
}
#[test]
fn test_multiple_transitions_with_comparisons(
name in valid_ident(),
state_name in valid_ident(),
comparisons in prop::collection::vec(
(valid_ident(), prop::sample::select(vec![">", "<", ">=", "<="]), valid_int(), valid_ident()),
2..5
)
) {
let transitions = comparisons.iter()
.map(|(var, op, val, target)| format!(" on {} {} {} -> {}", var, op, val, target))
.collect::<Vec<_>>()
.join("\n");
let input = format!("life_arc {} {{\n state {} {{\n{}\n }}\n}}", name, state_name, transitions);
let lexer = Lexer::new(&input);
let parser = FileParser::new();
let result = parser.parse(lexer);
assert!(result.is_ok(), "Failed to parse multiple comparison transitions: {:?}", result.err());
}
// ===== Equality expression tests =====
#[test]
fn test_equality_with_string(
ident in valid_ident(),
val in valid_string()
) {
let comp = format!("{} is \"{}\"", ident, val);
let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", comp);
let lexer = Lexer::new(&input);
let parser = FileParser::new();
let result = parser.parse(lexer);
assert!(result.is_ok(), "Failed to parse equality with string: {:?}", result.err());
}
#[test]
fn test_equality_with_int(
ident in valid_ident(),
val in valid_int()
) {
let comp = format!("{} is {}", ident, val);
let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", comp);
let lexer = Lexer::new(&input);
let parser = FileParser::new();
let result = parser.parse(lexer);
assert!(result.is_ok(), "Failed to parse equality with int: {:?}", result.err());
}
#[test]
fn test_equality_with_float(
ident in valid_ident(),
val in valid_float()
) {
let comp = format!("{} is {:.2}", ident, val);
let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", comp);
let lexer = Lexer::new(&input);
let parser = FileParser::new();
let result = parser.parse(lexer);
assert!(result.is_ok(), "Failed to parse equality with float: {:?}", result.err());
}
#[test]
fn test_equality_with_bool(
ident in valid_ident(),
val in prop::sample::select(vec![true, false])
) {
let comp = format!("{} is {}", ident, val);
let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", comp);
let lexer = Lexer::new(&input);
let parser = FileParser::new();
let result = parser.parse(lexer);
assert!(result.is_ok(), "Failed to parse equality with bool: {:?}", result.err());
}
#[test]
fn test_equality_multiple_transitions(
name in valid_ident(),
state_name in valid_ident(),
equalities in prop::collection::vec(
(valid_ident(), prop_oneof![
valid_int().prop_map(|n| n.to_string()),
valid_string().prop_map(|s| format!("\"{}\"", s)),
Just("true".to_string()),
Just("false".to_string()),
], valid_ident()),
2..5
)
) {
let transitions = equalities.iter()
.map(|(var, val, target)| format!(" on {} is {} -> {}", var, val, target))
.collect::<Vec<_>>()
.join("\n");
let input = format!("life_arc {} {{\n state {} {{\n{}\n }}\n}}", name, state_name, transitions);
let lexer = Lexer::new(&input);
let parser = FileParser::new();
let result = parser.parse(lexer);
assert!(result.is_ok(), "Failed to parse multiple equality transitions: {:?}", result.err());
}
#[test]
fn test_equality_mixed_with_comparisons(
name in valid_ident(),
state_name in valid_ident()
) {
let input = format!(
"life_arc {} {{\n state {} {{\n on age > 12 -> teen\n on status is active -> active_state\n on energy < 0.3 -> tired\n on completed is true -> done\n }}\n}}",
name, state_name
);
let lexer = Lexer::new(&input);
let parser = FileParser::new();
let result = parser.parse(lexer);
assert!(result.is_ok(), "Failed to parse mixed equality and comparisons: {:?}", result.err());
}
// ===== Logical operator tests =====
#[test]
fn test_logical_and(
ident1 in valid_ident(),
ident2 in valid_ident(),
val1 in valid_int(),
val2 in valid_int()
) {
let cond = format!("{} > {} and {} < {}", ident1, val1, ident2, val2);
let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", cond);
let lexer = Lexer::new(&input);
let parser = FileParser::new();
let result = parser.parse(lexer);
assert!(result.is_ok(), "Failed to parse 'and' operator: {:?}", result.err());
}
#[test]
fn test_logical_or(
ident1 in valid_ident(),
ident2 in valid_ident(),
val1 in valid_int(),
val2 in valid_int()
) {
let cond = format!("{} > {} or {} < {}", ident1, val1, ident2, val2);
let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", cond);
let lexer = Lexer::new(&input);
let parser = FileParser::new();
let result = parser.parse(lexer);
assert!(result.is_ok(), "Failed to parse 'or' operator: {:?}", result.err());
}
#[test]
fn test_logical_not_with_identifier(ident in valid_ident()) {
let cond = format!("not {}", ident);
let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", cond);
let lexer = Lexer::new(&input);
let parser = FileParser::new();
let result = parser.parse(lexer);
assert!(result.is_ok(), "Failed to parse 'not' operator with identifier: {:?}", result.err());
}
#[test]
fn test_logical_not_with_comparison(
ident in valid_ident(),
val in valid_int()
) {
let cond = format!("not {} > {}", ident, val);
let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", cond);
let lexer = Lexer::new(&input);
let parser = FileParser::new();
let result = parser.parse(lexer);
assert!(result.is_ok(), "Failed to parse 'not' with comparison: {:?}", result.err());
}
#[test]
fn test_and_with_equality(
ident1 in valid_ident(),
ident2 in valid_ident(),
val in valid_string()
) {
let cond = format!("{} is true and {} is \"{}\"", ident1, ident2, val);
let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", cond);
let lexer = Lexer::new(&input);
let parser = FileParser::new();
let result = parser.parse(lexer);
assert!(result.is_ok(), "Failed to parse 'and' with equality: {:?}", result.err());
}
#[test]
fn test_or_with_equality(
ident1 in valid_ident(),
ident2 in valid_ident()
) {
let cond = format!("{} is false or {} is true", ident1, ident2);
let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", cond);
let lexer = Lexer::new(&input);
let parser = FileParser::new();
let result = parser.parse(lexer);
assert!(result.is_ok(), "Failed to parse 'or' with equality: {:?}", result.err());
}
#[test]
fn test_chained_and(
    ident1 in valid_ident(),
    ident2 in valid_ident(),
    ident3 in valid_ident(),
    val1 in valid_int(),
    val2 in valid_int(),
    val3 in valid_int()
) {
    // Three clauses chained with `and` (left-associative chain).
    let condition = format!("{} > {} and {} < {} and {} is {}", ident1, val1, ident2, val2, ident3, val3);
    let source = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", condition);
    let outcome = FileParser::new().parse(Lexer::new(&source));
    assert!(outcome.is_ok(), "Failed to parse chained 'and': {:?}", outcome.err());
}
#[test]
fn test_chained_or(
    ident1 in valid_ident(),
    ident2 in valid_ident(),
    ident3 in valid_ident()
) {
    // Three bare identifiers chained with `or`.
    let condition = format!("{} or {} or {}", ident1, ident2, ident3);
    let source = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", condition);
    let outcome = FileParser::new().parse(Lexer::new(&source));
    assert!(outcome.is_ok(), "Failed to parse chained 'or': {:?}", outcome.err());
}
#[test]
fn test_mixed_and_or(
    ident1 in valid_ident(),
    ident2 in valid_ident(),
    ident3 in valid_ident(),
    val1 in valid_int(),
    val2 in valid_int()
) {
    // Precedence check: 'and' binds tighter than 'or', so the expected
    // grouping is (ident1 > val1 and ident2 < val2) or ident3.
    let condition = format!("{} > {} and {} < {} or {}", ident1, val1, ident2, val2, ident3);
    let source = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", condition);
    let outcome = FileParser::new().parse(Lexer::new(&source));
    assert!(outcome.is_ok(), "Failed to parse mixed 'and'/'or': {:?}", outcome.err());
}
#[test]
fn test_not_not(ident in valid_ident()) {
    // `not` must be stackable: `not not x`.
    let condition = format!("not not {}", ident);
    let source = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", condition);
    let outcome = FileParser::new().parse(Lexer::new(&source));
    assert!(outcome.is_ok(), "Failed to parse double 'not': {:?}", outcome.err());
}
#[test]
fn test_not_with_and(
    ident1 in valid_ident(),
    ident2 in valid_ident()
) {
    // Precedence check: 'not' binds tighter than 'and', so the expected
    // grouping is (not ident1) and ident2.
    let condition = format!("not {} and {}", ident1, ident2);
    let source = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", condition);
    let outcome = FileParser::new().parse(Lexer::new(&source));
    assert!(outcome.is_ok(), "Failed to parse 'not' with 'and': {:?}", outcome.err());
}
#[test]
fn test_complex_nested_logic(
    name in valid_ident(),
    state_name in valid_ident()
) {
    // One state with four transitions exercising every logical operator
    // (and / or / not) in combination, with random arc and state names.
    let source = format!(
        "life_arc {} {{\n state {} {{\n on age > 18 and status is active and energy > 0.5 -> state1\n on tired or hungry or sick -> state2\n on not ready and not completed -> state3\n on health > 50 and not sick or emergency -> state4\n }}\n}}",
        name, state_name
    );
    let outcome = FileParser::new().parse(Lexer::new(&source));
    assert!(outcome.is_ok(), "Failed to parse complex nested logic: {:?}", outcome.err());
}
// ===== Field access tests =====
#[test]
fn test_field_access_self(field in valid_ident(), val in valid_int()) {
    // `self.<field>` on the left of a comparison.
    let condition = format!("self.{} > {}", field, val);
    let source = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", condition);
    let outcome = FileParser::new().parse(Lexer::new(&source));
    assert!(outcome.is_ok(), "Failed to parse self field access: {:?}", outcome.err());
}
#[test]
fn test_field_access_other(field in valid_ident(), val in valid_int()) {
    // `other.<field>` on the left of a comparison.
    let condition = format!("other.{} < {}", field, val);
    let source = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", condition);
    let outcome = FileParser::new().parse(Lexer::new(&source));
    assert!(outcome.is_ok(), "Failed to parse other field access: {:?}", outcome.err());
}
#[test]
fn test_field_access_with_equality(
    field in valid_ident(),
    val in valid_string()
) {
    // Field access compared against a string literal with `is`.
    let condition = format!("self.{} is \"{}\"", field, val);
    let source = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", condition);
    let outcome = FileParser::new().parse(Lexer::new(&source));
    assert!(outcome.is_ok(), "Failed to parse field access with equality: {:?}", outcome.err());
}
#[test]
fn test_field_access_with_bool(field in valid_ident()) {
    // Field access compared against a boolean literal with `is`.
    let condition = format!("self.{} is true", field);
    let source = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", condition);
    let outcome = FileParser::new().parse(Lexer::new(&source));
    assert!(outcome.is_ok(), "Failed to parse field access with bool: {:?}", outcome.err());
}
#[test]
fn test_nested_field_access(
    field1 in valid_ident(),
    field2 in valid_ident(),
    val in valid_int()
) {
    // Two-level field path: self.<a>.<b>.
    let condition = format!("self.{}.{} > {}", field1, field2, val);
    let source = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", condition);
    let outcome = FileParser::new().parse(Lexer::new(&source));
    assert!(outcome.is_ok(), "Failed to parse nested field access: {:?}", outcome.err());
}
#[test]
fn test_field_access_with_and(
    field1 in valid_ident(),
    field2 in valid_ident(),
    val1 in valid_int(),
    val2 in valid_int()
) {
    // `self.*` and `other.*` comparisons joined by 'and'.
    let condition = format!("self.{} > {} and other.{} < {}", field1, val1, field2, val2);
    let source = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", condition);
    let outcome = FileParser::new().parse(Lexer::new(&source));
    assert!(outcome.is_ok(), "Failed to parse field access with 'and': {:?}", outcome.err());
}
#[test]
fn test_field_access_with_or(
    field1 in valid_ident(),
    field2 in valid_ident()
) {
    // Bare field accesses joined by 'or' (no comparison operator).
    let condition = format!("self.{} or other.{}", field1, field2);
    let source = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", condition);
    let outcome = FileParser::new().parse(Lexer::new(&source));
    assert!(outcome.is_ok(), "Failed to parse field access with 'or': {:?}", outcome.err());
}
#[test]
fn test_field_access_with_not(field in valid_ident()) {
    // `not` applied directly to a field access.
    let condition = format!("not self.{}", field);
    let source = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", condition);
    let outcome = FileParser::new().parse(Lexer::new(&source));
    assert!(outcome.is_ok(), "Failed to parse field access with 'not': {:?}", outcome.err());
}
#[test]
fn test_field_access_both_sides(
    field1 in valid_ident(),
    field2 in valid_ident()
) {
    // Field accesses on both sides of the comparison operator.
    let condition = format!("self.{} > other.{}", field1, field2);
    let source = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", condition);
    let outcome = FileParser::new().parse(Lexer::new(&source));
    assert!(outcome.is_ok(), "Failed to parse field access on both sides: {:?}", outcome.err());
}
#[test]
fn test_field_access_vs_identifier(
    field in valid_ident(),
    ident in valid_ident()
) {
    // A field access compared against a plain identifier.
    let condition = format!("self.{} > {}", field, ident);
    let source = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", condition);
    let outcome = FileParser::new().parse(Lexer::new(&source));
    assert!(outcome.is_ok(), "Failed to parse field access vs identifier: {:?}", outcome.err());
}
#[test]
fn test_complex_field_access(
    name in valid_ident(),
    state_name in valid_ident()
) {
    // Four transitions mixing self/other field paths with every logical
    // operator, using random arc and state names.
    let source = format!(
        "life_arc {} {{\n state {} {{\n on self.age > 18 and self.status is active -> state1\n on other.bond < 0.3 or self.energy < 0.2 -> state2\n on not self.ready and other.level > 5 -> state3\n on self.health > other.health -> state4\n }}\n}}",
        name, state_name
    );
    let outcome = FileParser::new().parse(Lexer::new(&source));
    assert!(outcome.is_ok(), "Failed to parse complex field access: {:?}", outcome.err());
}
}
// ===== Prose block property tests =====
fn valid_prose_content() -> impl Strategy<Value = String> {
    // Prose content in which no line starts with '-', so the generated text
    // can never contain a line the lexer could mistake for the "---"
    // prose-block terminator.
    //
    // The previous pattern, "[^\n]*(\n[^-][^\n]*)*", had two loopholes:
    //   1. The first line ("[^\n]*") could itself begin with '-' — even be
    //      exactly "---" — prematurely closing the block.
    //   2. "[^-]" also matches '\n', so "\n" + "[^-]" + "[^\n]*" could expand
    //      to an empty line followed by a line beginning with '-'.
    // Either loophole lets the strategy emit a "---" line inside the content,
    // which makes the round-trip property tests flaky. The pattern below
    // keeps the original shape (possibly-empty first line, non-empty
    // continuation lines) but forbids a leading '-' on every line.
    prop::string::string_regex("([^-\n][^\n]*)?(\n[^-\n][^\n]*)*").unwrap()
}
proptest! {
    #[test]
    fn test_prose_block_roundtrip(
        tag in valid_ident(),
        content in valid_prose_content()
    ) {
        // A tagged prose block must lex to exactly one token whose tag and
        // (trimmed) content survive the trip through the lexer.
        let source = format!("---{}\n{}\n---", tag, content);
        let tokens: Vec<Token> = Lexer::new(&source).map(|(_, tok, _)| tok).collect();
        assert_eq!(tokens.len(), 1);
        if let Token::ProseBlock(block) = &tokens[0] {
            assert_eq!(block.tag, tag);
            assert_eq!(block.content.trim(), content.trim());
        } else {
            panic!("Expected ProseBlock token");
        }
    }

    #[test]
    fn test_character_with_prose(
        name in valid_ident(),
        tag in valid_ident(),
        content in valid_prose_content()
    ) {
        // A prose block used as a field value inside a character declaration.
        let source = format!(
            "character {} {{\n {}: ---{}\n{}\n---\n}}",
            name, tag, tag, content
        );
        let outcome = FileParser::new().parse(Lexer::new(&source));
        assert!(outcome.is_ok(), "Failed to parse character with prose:\n{}\nError: {:?}", source, outcome.err());
    }
}
// ===== Edge case tests =====
#[cfg(test)]
mod edge_cases {
    use super::*;

    // Degenerate and boundary inputs: the parser must accept empty files,
    // pure whitespace, comment-only files, and cope with odd string contents
    // and nested object literals.

    #[test]
    fn test_empty_input_parses() {
        // An empty file is valid and yields zero declarations.
        let parsed = FileParser::new().parse(Lexer::new(""));
        assert!(parsed.is_ok());
        assert_eq!(parsed.unwrap().declarations.len(), 0);
    }

    proptest! {
        #[test]
        fn test_only_whitespace_parses(ws in "[ \t\n]{1,100}") {
            // Whitespace-only input is equivalent to an empty file.
            let parsed = FileParser::new().parse(Lexer::new(&ws));
            assert!(parsed.is_ok());
            assert_eq!(parsed.unwrap().declarations.len(), 0);
        }

        #[test]
        fn test_only_comments_parses(
            n in 1usize..10,
            comment_content in valid_string()
        ) {
            // A file holding nothing but line comments yields zero declarations.
            let source = vec![format!("// {}", comment_content); n].join("\n");
            let parsed = FileParser::new().parse(Lexer::new(&source));
            assert!(parsed.is_ok());
            assert_eq!(parsed.unwrap().declarations.len(), 0);
        }

        #[test]
        fn test_unicode_in_strings(s in "[^\"\\\\ ]{1,20}") {
            // Arbitrary characters (minus quote/backslash/space) inside a
            // string literal must never panic the lexer or parser.
            let source = format!("character Test {{ name: \"{}\" }}", s);
            let outcome = FileParser::new().parse(Lexer::new(&source));
            // Should either parse or fail gracefully
            let _ = outcome;
        }

        #[test]
        fn test_nested_objects(depth in 1usize..4) {
            // Builds `character Test { data: { inner: ... 42 ... } }` with
            // `depth` levels of nesting around the literal 42.
            let mut source = String::from("character Test { data: ");
            source.push_str(&"{ inner: ".repeat(depth));
            source.push_str("42");
            source.push_str(&" }".repeat(depth));
            source.push_str(" }");
            let outcome = FileParser::new().parse(Lexer::new(&source));
            assert!(outcome.is_ok(), "Failed to parse nested objects (depth {}): {}", depth, source);
        }
    }
}