feat: implement storybook DSL with template composition and validation

Add complete domain-specific language for authoring narrative content for
agent simulations.

Features:
- Complete parser using LALRPOP + logos lexer
- Template composition (includes + multiple inheritance)
- Strict mode validation for templates
- Reserved keyword protection
- Semantic validators (trait ranges, schedule overlaps, life arcs, behaviors)
- Name resolution and cross-reference tracking
- CLI tool (validate, inspect, watch commands)
- Query API with filtering
- 260 comprehensive tests (unit, integration, property-based)

Implementation phases:
- Phase 1 (Parser): Complete
- Phase 2 (Resolution + Validation): Complete
- Phase 3 (Public API + CLI): Complete

BREAKING CHANGE: Initial implementation
This commit is contained in:
2026-02-08 13:24:35 +00:00
commit 9c20dd4092
59 changed files with 25484 additions and 0 deletions

188
src/bin/sb.rs Normal file
View File

@@ -0,0 +1,188 @@
//! Storybook CLI tool
//!
//! Commands:
//! - `sb validate <path>` - Parse and validate entire project
//! - `sb inspect <entity>` - Show fully resolved entity details
//! - `sb watch <path>` - Continuous validation on file changes
use std::path::{
    Path,
    PathBuf,
};

use clap::{
    Parser,
    Subcommand,
};
use miette::{
    IntoDiagnostic,
    Result,
};
use storybook::Project;
/// Top-level argument parser for the `sb` binary.
#[derive(Parser)]
#[command(name = "sb")]
#[command(about = "Storybook DSL tool", long_about = None)]
struct Cli {
    /// Which subcommand to run (validate / inspect / watch).
    #[command(subcommand)]
    command: Commands,
}
/// The subcommands supported by `sb`. Every path argument defaults to the
/// current directory (`.`).
#[derive(Subcommand)]
enum Commands {
    /// Validate a storybook project or file
    Validate {
        /// Path to a .sb file or directory containing .sb files
        #[arg(default_value = ".")]
        path: PathBuf,
    },
    /// Inspect a specific entity
    Inspect {
        /// Entity name to inspect
        name: String,
        /// Path to the storybook project directory
        #[arg(short, long, default_value = ".")]
        path: PathBuf,
    },
    /// Watch a project for changes and re-validate
    Watch {
        /// Path to the storybook project directory
        #[arg(default_value = ".")]
        path: PathBuf,
    },
}
/// CLI entry point: parse arguments and dispatch to the chosen subcommand.
fn main() -> Result<()> {
    let cli = Cli::parse();
    // Each handler already returns `Result<()>`, so the match result is the
    // function result.
    match cli.command {
        Commands::Validate { path } => validate(&path),
        Commands::Inspect { name, path } => inspect(&name, &path),
        Commands::Watch { path } => watch(&path),
    }
}
fn validate(path: &PathBuf) -> Result<()> {
println!("Validating storybook at: {}", path.display());
let project = Project::load(path)?;
let char_count = project.characters().count();
let rel_count = project.relationships().count();
let inst_count = project.institutions().count();
let sched_count = project.schedules().count();
let behavior_count = project.behaviors().count();
let arc_count = project.life_arcs().count();
println!("✓ Validation successful!");
println!();
println!("Project contents:");
println!(" Characters: {}", char_count);
println!(" Relationships: {}", rel_count);
println!(" Institutions: {}", inst_count);
println!(" Schedules: {}", sched_count);
println!(" Behaviors: {}", behavior_count);
println!(" Life Arcs: {}", arc_count);
Ok(())
}
fn inspect(name: &str, path: &PathBuf) -> Result<()> {
println!("Loading project from: {}", path.display());
let project = Project::load(path)?;
// Try to find the entity as different types
if let Some(character) = project.find_character(name) {
println!("Character: {}", character.name);
println!("Fields:");
for (field_name, value) in &character.fields {
println!(" {}: {:?}", field_name, value);
}
println!("Prose blocks:");
for (tag, prose) in &character.prose_blocks {
println!(" ---{}", tag);
println!("{}", prose.content);
println!(" ---");
}
return Ok(());
}
if let Some(relationship) = project.find_relationship(name) {
println!("Relationship: {}", relationship.name);
println!("Participants:");
for participant in &relationship.participants {
println!(" {}", participant.name.join("::"));
}
println!("Fields:");
for (field_name, value) in &relationship.fields {
println!(" {}: {:?}", field_name, value);
}
return Ok(());
}
if let Some(institution) = project.find_institution(name) {
println!("Institution: {}", institution.name);
println!("Fields:");
for (field_name, value) in &institution.fields {
println!(" {}: {:?}", field_name, value);
}
return Ok(());
}
println!("Entity '{}' not found in project", name);
Ok(())
}
fn watch(path: &PathBuf) -> Result<()> {
use std::sync::mpsc::channel;
use notify::{
Event,
EventKind,
RecursiveMode,
Watcher,
};
println!("Watching for changes in: {}", path.display());
println!("Press Ctrl+C to stop");
println!();
// Initial validation
match Project::load(path) {
| Ok(_) => println!("✓ Initial validation successful"),
| Err(e) => println!("✗ Initial validation failed: {}", e),
}
let (tx, rx) = channel::<notify::Result<Event>>();
let mut watcher = notify::recommended_watcher(tx).into_diagnostic()?;
watcher
.watch(path, RecursiveMode::Recursive)
.into_diagnostic()?;
for res in rx {
match res {
| Ok(event) => {
// Only re-validate on write events for .sb files
if matches!(event.kind, EventKind::Modify(_) | EventKind::Create(_)) &&
event
.paths
.iter()
.any(|p| p.extension().and_then(|s| s.to_str()) == Some("sb"))
{
println!("\n--- Change detected, re-validating... ---");
match Project::load(path) {
| Ok(_) => println!("✓ Validation successful"),
| Err(e) => println!("✗ Validation failed: {}", e),
}
}
},
| Err(e) => println!("Watch error: {:?}", e),
}
}
Ok(())
}

506
src/error_showcase_tests.rs Normal file
View File

@@ -0,0 +1,506 @@
//! Functional tests that showcase every error type with its helpful message
//!
//! These tests are designed to:
//! 1. Ensure every error type can be triggered
//! 2. Document what causes each error
//! 3. Verify that error messages are helpful and clear
use std::collections::HashSet;
use crate::{
resolve::{
convert::convert_file,
names::NameTable,
validate::{
validate_behavior_tree_actions,
validate_life_arc_transitions,
validate_relationship_bonds,
validate_schedule_overlaps,
validate_trait_ranges,
},
ErrorCollector,
ResolveError,
},
syntax::{
ast::*,
lexer::Lexer,
FileParser,
},
Project,
};
// ===== Parse Errors =====
#[test]
fn test_unexpected_token_error() {
    // The colon after 'age' is missing - should trigger UnexpectedToken.
    let source = r#"
character Martha {
age 34
}
"#;
    let parsed = FileParser::new().parse(Lexer::new(source));
    assert!(parsed.is_err(), "Should fail with unexpected token");
    println!("\n=== UnexpectedToken Error ===");
    if let Err(parse_error) = parsed {
        println!("{:?}", parse_error);
    }
}
#[test]
fn test_unexpected_eof_error() {
    // The closing brace is missing - should trigger UnexpectedEof.
    let source = r#"
character Martha {
age: 34
"#;
    let parsed = FileParser::new().parse(Lexer::new(source));
    assert!(parsed.is_err(), "Should fail with unexpected EOF");
    println!("\n=== UnexpectedEof Error ===");
    if let Err(parse_error) = parsed {
        println!("{:?}", parse_error);
    }
}
#[test]
fn test_invalid_token_error() {
    // '@#$' is not a legal token sequence - should trigger InvalidToken.
    let source = "character Martha { age: @#$ }";
    let parsed = FileParser::new().parse(Lexer::new(source));
    assert!(parsed.is_err(), "Should fail with invalid token");
    println!("\n=== InvalidToken Error ===");
    if let Err(parse_error) = parsed {
        println!("{:?}", parse_error);
    }
}
#[test]
fn test_unclosed_prose_block_error() {
    // The prose block is never terminated - should trigger UnclosedProseBlock.
    let source = r#"
character Martha {
backstory: ---backstory
This is Martha's backstory.
It goes on and on...
But it never closes!
}
"#;
    let parsed = FileParser::new().parse(Lexer::new(source));
    assert!(parsed.is_err(), "Should fail with unclosed prose block");
    println!("\n=== UnclosedProseBlock Error ===");
    if let Err(parse_error) = parsed {
        println!("{:?}", parse_error);
    }
}
// ===== Resolution Errors =====
#[test]
fn test_name_not_found_error() {
    // An empty file defines no names, so any lookup must come back empty.
    let file = File {
        declarations: vec![],
    };
    let table = NameTable::from_file(&file).unwrap();
    assert!(
        table.lookup(&["NonExistent".to_string()]).is_none(),
        "Should not find non-existent name"
    );
    // Create the actual error
    let error = ResolveError::NameNotFound {
        name: "NonExistent".to_string(),
        suggestion: table.find_suggestion("NonExistent"),
    };
    println!("\n=== NameNotFound Error ===");
    println!("{:?}", error);
}
#[test]
fn test_duplicate_definition_error() {
    // Two characters sharing the name "Martha" in the same file.
    let martha_at = |lo, hi| {
        Declaration::Character(Character {
            name: "Martha".to_string(),
            fields: vec![],
            template: None,
            span: Span::new(lo, hi),
        })
    };
    let file = File {
        declarations: vec![martha_at(0, 10), martha_at(20, 30)],
    };
    let result = NameTable::from_file(&file);
    assert!(result.is_err(), "Should fail with duplicate definition");
    println!("\n=== DuplicateDefinition Error ===");
    if let Err(e) = result {
        println!("{:?}", e);
    }
}
#[test]
fn test_circular_dependency_error() {
    // Manually create a circular dependency error for demonstration
    println!("\n=== CircularDependency Error ===");
    let demo_error = ResolveError::CircularDependency {
        cycle: "Template A -> Template B -> Template A".to_string(),
    };
    println!("{:?}", demo_error);
}
#[test]
fn test_invalid_field_access_error() {
    // Demonstrates the message shown when a field does not exist.
    println!("\n=== InvalidFieldAccess Error ===");
    let demo_error = ResolveError::InvalidFieldAccess {
        message: "Field 'nonexistent' does not exist on character 'Martha'".to_string(),
    };
    println!("{:?}", demo_error);
}
#[test]
fn test_type_mismatch_error() {
    // Demonstrates the message shown when a field has the wrong type.
    println!("\n=== TypeMismatch Error ===");
    let demo_error = ResolveError::TypeMismatch {
        message: "Expected number for field 'age', but got string \"thirty\"".to_string(),
    };
    println!("{:?}", demo_error);
}
#[test]
fn test_validation_error_generic() {
    // Demonstrates a generic validation failure with an attached help text.
    println!("\n=== ValidationError Error ===");
    let demo_error = ResolveError::ValidationError {
        message: "Cannot append field 'age': field already exists".to_string(),
        help: Some("The 'append' operation is used to add new fields. Use 'set' to update existing fields.".to_string()),
    };
    println!("{:?}", demo_error);
}
// ===== Validation Errors =====
/// A transition targeting a state name that is not declared in the arc must
/// be reported by `validate_life_arc_transitions`.
#[test]
fn test_unknown_life_arc_state_error() {
    let life_arc = LifeArc {
        name: "Growth".to_string(),
        states: vec![
            ArcState {
                name: "child".to_string(),
                transitions: vec![Transition {
                    to: "adult".to_string(), // 'adult' exists
                    condition: Expr::BoolLit(true),
                    span: Span::new(0, 10),
                }],
                span: Span::new(0, 50),
            },
            ArcState {
                name: "adult".to_string(),
                transitions: vec![Transition {
                    to: "senior".to_string(), // 'senior' doesn't exist!
                    condition: Expr::BoolLit(true),
                    span: Span::new(50, 60),
                }],
                span: Span::new(50, 100),
            },
        ],
        span: Span::new(0, 100),
    };
    let mut collector = ErrorCollector::new();
    validate_life_arc_transitions(&life_arc, &mut collector);
    assert!(collector.has_errors(), "Should fail with unknown state");
    println!("\n=== UnknownLifeArcState Error ===");
    // has_errors() was just asserted, so into_result must yield Err here;
    // the redundant re-check is dropped.
    if let Err(e) = collector.into_result(()) {
        println!("{:?}", e);
    }
}
/// A 'bond' trait above 1.0 must be flagged by `validate_trait_ranges`.
#[test]
fn test_trait_out_of_range_error_bond() {
    let fields = vec![Field {
        name: "bond".to_string(),
        value: Value::Float(1.5), // Out of range!
        span: Span::new(0, 10),
    }];
    let mut collector = ErrorCollector::new();
    validate_trait_ranges(&fields, &mut collector);
    assert!(
        collector.has_errors(),
        "Should fail with out of range trait"
    );
    println!("\n=== TraitOutOfRange Error (bond too high) ===");
    // has_errors() was just asserted, so into_result must yield Err here.
    if let Err(e) = collector.into_result(()) {
        println!("{:?}", e);
    }
}
/// An 'age' of 200 must be flagged by `validate_trait_ranges`.
#[test]
fn test_trait_out_of_range_error_age() {
    let fields = vec![Field {
        name: "age".to_string(),
        value: Value::Int(200), // Out of range!
        span: Span::new(0, 10),
    }];
    let mut collector = ErrorCollector::new();
    validate_trait_ranges(&fields, &mut collector);
    assert!(collector.has_errors(), "Should fail with out of range age");
    println!("\n=== TraitOutOfRange Error (age too high) ===");
    // has_errors() was just asserted, so into_result must yield Err here.
    if let Err(e) = collector.into_result(()) {
        println!("{:?}", e);
    }
}
/// A negative trait value must be flagged by `validate_trait_ranges`.
#[test]
fn test_trait_out_of_range_negative() {
    let fields = vec![Field {
        name: "trust".to_string(),
        value: Value::Float(-0.2), // Negative!
        span: Span::new(0, 10),
    }];
    let mut collector = ErrorCollector::new();
    validate_trait_ranges(&fields, &mut collector);
    assert!(collector.has_errors(), "Should fail with negative trait");
    println!("\n=== TraitOutOfRange Error (negative value) ===");
    // has_errors() was just asserted, so into_result must yield Err here.
    if let Err(e) = collector.into_result(()) {
        println!("{:?}", e);
    }
}
/// Two schedule blocks with overlapping time ranges must be flagged by
/// `validate_schedule_overlaps`.
#[test]
fn test_schedule_overlap_error() {
    let schedule = Schedule {
        name: "DailyRoutine".to_string(),
        blocks: vec![
            ScheduleBlock {
                activity: "work".to_string(),
                start: Time {
                    hour: 8,
                    minute: 0,
                    second: 0,
                },
                end: Time {
                    hour: 12,
                    minute: 30,
                    second: 0,
                },
                span: Span::new(0, 50),
            },
            ScheduleBlock {
                activity: "lunch".to_string(),
                start: Time {
                    hour: 12,
                    minute: 0, // Overlaps with work!
                    second: 0,
                },
                end: Time {
                    hour: 13,
                    minute: 0,
                    second: 0,
                },
                span: Span::new(50, 100),
            },
        ],
        span: Span::new(0, 100),
    };
    let mut collector = ErrorCollector::new();
    validate_schedule_overlaps(&schedule, &mut collector);
    assert!(collector.has_errors(), "Should fail with schedule overlap");
    println!("\n=== ScheduleOverlap Error ===");
    // has_errors() was just asserted, so into_result must yield Err here.
    if let Err(e) = collector.into_result(()) {
        println!("{:?}", e);
    }
}
/// An action node whose name is not in the registry must be flagged by
/// `validate_behavior_tree_actions`.
#[test]
fn test_unknown_behavior_action_error() {
    let tree = Behavior {
        name: "WorkDay".to_string(),
        root: BehaviorNode::Action("unknown_action".to_string(), vec![]),
        span: Span::new(0, 50),
    };
    // Create a registry with some known actions (but not "unknown_action")
    let mut action_registry = HashSet::new();
    action_registry.insert("walk".to_string());
    action_registry.insert("work".to_string());
    action_registry.insert("eat".to_string());
    let mut collector = ErrorCollector::new();
    validate_behavior_tree_actions(&tree, &action_registry, &mut collector);
    assert!(collector.has_errors(), "Should fail with unknown action");
    println!("\n=== UnknownBehaviorAction Error ===");
    // has_errors() was just asserted, so into_result must yield Err here.
    if let Err(e) = collector.into_result(()) {
        println!("{:?}", e);
    }
}
/// A relationship 'bond' far above 1.0 must be flagged by
/// `validate_relationship_bonds`.
#[test]
fn test_relationship_bond_out_of_range() {
    let relationship = Relationship {
        name: "Test".to_string(),
        participants: vec![],
        fields: vec![Field {
            name: "bond".to_string(),
            value: Value::Float(2.5), // Way out of range!
            span: Span::new(0, 10),
        }],
        span: Span::new(0, 50),
    };
    let mut collector = ErrorCollector::new();
    validate_relationship_bonds(&[relationship], &mut collector);
    assert!(collector.has_errors(), "Should fail with bond out of range");
    println!("\n=== Relationship Bond Out of Range ===");
    // has_errors() was just asserted, so into_result must yield Err here.
    if let Err(e) = collector.into_result(()) {
        println!("{:?}", e);
    }
}
#[test]
fn test_duplicate_field_in_convert() {
    // The same field name appears twice on one character.
    let age_field = |years, lo, hi| Field {
        name: "age".to_string(),
        value: Value::Int(years),
        span: Span::new(lo, hi),
    };
    let file = File {
        declarations: vec![Declaration::Character(Character {
            name: "Martha".to_string(),
            fields: vec![age_field(34, 0, 10), age_field(35, 10, 20)],
            template: None,
            span: Span::new(0, 50),
        })],
    };
    let conversion = convert_file(&file);
    assert!(conversion.is_err(), "Should fail with duplicate field");
    println!("\n=== Duplicate Field Error (in conversion) ===");
    if let Err(e) = conversion {
        println!("{:?}", e);
    }
}
// ===== Project Errors =====
#[test]
fn test_invalid_project_structure_no_directory() {
    // Loading a path that does not exist must fail with InvalidStructure.
    match Project::load("/nonexistent/path/to/project") {
        Ok(_) => panic!("Should fail with invalid structure"),
        Err(e) => {
            println!("\n=== InvalidStructure Error (directory doesn't exist) ===");
            println!("{:?}", e);
        },
    }
}
#[test]
fn test_invalid_project_structure_not_directory() {
    // Try to load a file as if it were a directory
    match Project::load("Cargo.toml") {
        Ok(_) => panic!("Should fail - file not directory"),
        Err(e) => {
            println!("\n=== InvalidStructure Error (not a directory) ===");
            println!("{:?}", e);
        },
    }
}
// ===== Showcase All Errors =====
/// Runs every showcase test in sequence so all error messages can be read in
/// a single pass of output.
#[test]
#[ignore] // Run with: cargo test error_showcase -- --ignored --nocapture
fn error_showcase_all() {
    println!("\n\n");
    println!("╔════════════════════════════════════════════════════════════════╗");
    println!("║           STORYBOOK ERROR MESSAGES SHOWCASE                    ║");
    println!("║     Every error type with helpful hints for users              ║");
    println!("╚════════════════════════════════════════════════════════════════╝");
    // Parse errors
    test_unexpected_token_error();
    test_unexpected_eof_error();
    test_invalid_token_error();
    test_unclosed_prose_block_error();
    // Resolution errors
    test_name_not_found_error();
    test_duplicate_definition_error();
    test_circular_dependency_error();
    test_invalid_field_access_error();
    test_type_mismatch_error();
    test_validation_error_generic();
    // Validation errors
    test_unknown_life_arc_state_error();
    test_trait_out_of_range_error_bond();
    test_trait_out_of_range_error_age();
    test_trait_out_of_range_negative();
    test_schedule_overlap_error();
    test_unknown_behavior_action_error();
    test_relationship_bond_out_of_range();
    test_duplicate_field_in_convert();
    // Project errors
    test_invalid_project_structure_no_directory();
    test_invalid_project_structure_not_directory();
    println!("\n\n");
    println!("╔════════════════════════════════════════════════════════════════╗");
    println!("║                    SHOWCASE COMPLETE                           ║");
    println!("╚════════════════════════════════════════════════════════════════╝");
}

418
src/lib.rs Normal file
View File

@@ -0,0 +1,418 @@
//! Storybook - A DSL for authoring narrative content for agent simulations
//!
//! This library provides parsing, resolution, and validation for `.sb` files.
//!
//! # Example
//!
//! ```no_run
//! use storybook::{
//! query::CharacterQuery,
//! Project,
//! };
//!
//! // Load and validate a storybook project
//! let project = Project::load("path/to/storybook")?;
//!
//! // Query for characters
//! for character in project.characters() {
//! println!("Character: {}", character.name);
//! }
//!
//! // Filter characters by age and traits
//! for character in project
//! .characters()
//! .with_age_range(25, 40)
//! .with_trait("trust", 0.7, 1.0)
//! {
//! println!("Trusted character: {}", character.name);
//! }
//!
//! # Ok::<(), Box<dyn std::error::Error>>(())
//! ```
// Suppress false positive warnings from thiserror macro
#![allow(unused_assignments)]
pub mod query;
pub mod resolve;
pub mod syntax;
pub mod types;
#[cfg(test)]
mod error_showcase_tests;
use std::{
collections::HashSet,
path::{
Path,
PathBuf,
},
};
use miette::Diagnostic;
pub use resolve::{
NameTable,
QualifiedPath,
};
use thiserror::Error;
pub use types::*;
use crate::{
resolve::validate,
syntax::{
ast::File,
FileParser,
},
};
/// Errors that can occur when working with projects
#[derive(Error, Debug, Diagnostic)]
pub enum ProjectError {
    /// A `.sb` file contained a syntax error.
    #[error("Failed to parse file: {path}")]
    #[diagnostic(help("There's a syntax error in this file. Check the error details above for the specific issue. Common problems: missing braces, unclosed strings, incorrect syntax for declarations."))]
    #[allow(dead_code)]
    ParseError {
        /// Display form of the offending file's path.
        path: String,
        /// The underlying lexer/parser error, boxed so the variant is
        /// `Send + Sync`.
        #[source]
        source: Box<dyn std::error::Error + Send + Sync>,
    },
    /// Name resolution or semantic validation failed; forwarded verbatim.
    #[error(transparent)]
    #[diagnostic(transparent)]
    ResolveError(#[from] resolve::ResolveError),
    /// Reading a project file from disk failed.
    #[error("IO error reading files: {0}")]
    #[diagnostic(help("There was a problem reading or writing files. Check that: the file exists, you have permission to read it, the path is correct, and there's enough disk space."))]
    IoError(#[from] std::io::Error),
    /// The target path is not a usable project (missing path, wrong file
    /// type, or no `.sb` files found).
    #[error("Invalid project structure: {0}")]
    #[diagnostic(help("The project directory structure is not valid. Make sure: the path exists and is a directory, it contains at least one .sb file, you have permission to access it."))]
    InvalidStructure(String),
}
/// Result type for project operations
pub type Result<T> = std::result::Result<T, ProjectError>;
/// A loaded and validated Storybook project
///
/// Construct via [`Project::load`]; a value of this type implies every file
/// has already been parsed, resolved, and validated successfully.
#[derive(Debug, Clone)]
pub struct Project {
    /// Root path of the project (either a file or directory)
    pub root: PathBuf,
    /// All resolved files in the project
    pub files: Vec<ResolvedFile>,
    /// Combined name table across all files
    pub name_table: NameTable,
}
impl Project {
    /// Load a storybook project from a file or directory
    ///
    /// This will:
    /// 1. Find all `.sb` files (or load the single file if path is a file)
    /// 2. Parse each file
    /// 3. Build a combined name table
    /// 4. Resolve all cross-references
    /// 5. Validate semantic constraints
    ///
    /// # Errors
    ///
    /// Returns an error if:
    /// - The path doesn't exist
    /// - Any `.sb` file fails to parse
    /// - Name resolution fails (undefined references, duplicates, etc.)
    /// - Semantic validation fails (invalid ranges, overlaps, etc.)
    pub fn load<P: AsRef<Path>>(root: P) -> Result<Self> {
        let root = root.as_ref().to_path_buf();
        if !root.exists() {
            return Err(ProjectError::InvalidStructure(format!(
                "Path does not exist: {}",
                root.display()
            )));
        }
        // Handle both single files and directories
        let sb_files = if root.is_file() {
            // Single file - validate it's a .sb file
            if root.extension().and_then(|s| s.to_str()) != Some("sb") {
                return Err(ProjectError::InvalidStructure(format!(
                    "File must have .sb extension: {}",
                    root.display()
                )));
            }
            vec![root.clone()]
        } else if root.is_dir() {
            // Directory - find all .sb files
            Self::find_sb_files(&root)?
        } else {
            // exists() but neither file nor dir (e.g. special file)
            return Err(ProjectError::InvalidStructure(format!(
                "Path is neither a file nor a directory: {}",
                root.display()
            )));
        };
        if sb_files.is_empty() {
            return Err(ProjectError::InvalidStructure(
                "No .sb files found in project".to_string(),
            ));
        }
        // Parse all files
        let mut parsed_files = Vec::new();
        for path in &sb_files {
            let file = Self::parse_file(path)?;
            parsed_files.push(file);
        }
        // Build combined name table from all files
        let name_table = NameTable::from_files(&parsed_files)?;
        // Build action registry from schema if it exists
        let action_registry = Self::build_action_registry(&root)?;
        // Validate and convert all files
        let mut resolved_files = Vec::new();
        for file in parsed_files {
            // First validate
            validate::validate_file(&file, &action_registry)?;
            // Then convert AST to resolved types
            let declarations = resolve::convert::convert_file(&file)?;
            resolved_files.push(ResolvedFile { declarations });
        }
        Ok(Project {
            root,
            files: resolved_files,
            name_table,
        })
    }
    /// Parse a single .sb file
    ///
    /// NOTE(review): lexer/parser failures are wrapped in a `std::io::Error`
    /// purely to obtain an owned `Send + Sync` source; the structured error
    /// is flattened into its debug string here.
    fn parse_file(path: &Path) -> Result<File> {
        let content = std::fs::read_to_string(path)?;
        let lexer = syntax::lexer::Lexer::new(&content);
        FileParser::new()
            .parse(lexer)
            .map_err(|e| ProjectError::ParseError {
                path: path.display().to_string(),
                source: Box::new(std::io::Error::new(
                    std::io::ErrorKind::InvalidData,
                    format!("Parse error: {:?}", e),
                )),
            })
    }
    /// Find all .sb files in a directory recursively
    ///
    /// NOTE(review): unreadable directory entries are silently skipped by
    /// `filter_map(|e| e.ok())` - confirm that's intended.
    fn find_sb_files(root: &Path) -> Result<Vec<PathBuf>> {
        let mut files = Vec::new();
        for entry in walkdir::WalkDir::new(root)
            .follow_links(true)
            .into_iter()
            .filter_map(|e| e.ok())
        {
            let path = entry.path();
            if path.extension().and_then(|s| s.to_str()) == Some("sb") {
                files.push(path.to_path_buf());
            }
        }
        Ok(files)
    }
    /// Build action registry from schema files if they exist
    ///
    /// Looks for `schema/actions.sb` and extracts action names from `enum
    /// Action` declaration. If no schema exists, returns empty registry (no
    /// validation).
    ///
    /// # Future Tooling
    ///
    /// TODO: Add commands to help maintain the Action enum:
    /// - `sb actions check` - find actions used in behavior trees but not in
    ///   enum
    /// - `sb actions suggest` - auto-generate/update enum Action from usage
    /// - `sb actions unused` - find enum variants never used in behavior trees
    fn build_action_registry(root: &Path) -> Result<HashSet<String>> {
        let schema_path = if root.is_file() {
            // If root is a file, look for schema in parent directory
            root.parent().map(|p| p.join("schema").join("actions.sb"))
        } else {
            // If root is a directory, look for schema subdirectory
            Some(root.join("schema").join("actions.sb"))
        };
        let schema_path = match schema_path {
            | Some(path) if path.exists() => path,
            | _ => return Ok(HashSet::new()), // No schema, return empty registry
        };
        // Parse the schema file
        let file = Self::parse_file(&schema_path)?;
        // Find enum Action declaration and extract variants
        let mut registry = HashSet::new();
        for decl in &file.declarations {
            if let syntax::ast::Declaration::Enum(enum_decl) = decl {
                // Only the enum literally named "Action" feeds the registry.
                if enum_decl.name == "Action" {
                    for variant in &enum_decl.variants {
                        registry.insert(variant.clone());
                    }
                }
            }
        }
        Ok(registry)
    }
    /// Get all characters across all files
    pub fn characters(&self) -> impl Iterator<Item = &ResolvedCharacter> {
        self.files.iter().flat_map(|f| f.characters())
    }
    /// Get all relationships across all files
    pub fn relationships(&self) -> impl Iterator<Item = &ResolvedRelationship> {
        self.files.iter().flat_map(|f| f.relationships())
    }
    /// Get all institutions across all files
    pub fn institutions(&self) -> impl Iterator<Item = &ResolvedInstitution> {
        self.files.iter().flat_map(|f| f.institutions())
    }
    /// Get all schedules across all files
    pub fn schedules(&self) -> impl Iterator<Item = &ResolvedSchedule> {
        self.files.iter().flat_map(|f| f.schedules())
    }
    /// Get all behavior trees across all files
    pub fn behaviors(&self) -> impl Iterator<Item = &ResolvedBehavior> {
        self.files.iter().flat_map(|f| f.behaviors())
    }
    /// Get all life arcs across all files
    pub fn life_arcs(&self) -> impl Iterator<Item = &ResolvedLifeArc> {
        self.files.iter().flat_map(|f| f.life_arcs())
    }
    /// Get all locations across all files
    pub fn locations(&self) -> impl Iterator<Item = &ResolvedLocation> {
        self.files.iter().flat_map(|f| f.locations())
    }
    /// Get all species across all files
    pub fn species(&self) -> impl Iterator<Item = &ResolvedSpecies> {
        self.files.iter().flat_map(|f| f.species())
    }
    /// Get all enums across all files
    pub fn enums(&self) -> impl Iterator<Item = &ResolvedEnum> {
        self.files.iter().flat_map(|f| f.enums())
    }
    /// Find a character by name (linear scan; first match wins)
    pub fn find_character(&self, name: &str) -> Option<&ResolvedCharacter> {
        self.characters().find(|c| c.name == name)
    }
    /// Find a relationship by name (linear scan; first match wins)
    pub fn find_relationship(&self, name: &str) -> Option<&ResolvedRelationship> {
        self.relationships().find(|r| r.name == name)
    }
    /// Find an institution by name (linear scan; first match wins)
    pub fn find_institution(&self, name: &str) -> Option<&ResolvedInstitution> {
        self.institutions().find(|i| i.name == name)
    }
}
#[cfg(test)]
mod tests {
    use std::fs;

    use tempfile::TempDir;

    use super::*;

    /// No `schema/actions.sb` present -> registry is empty (validation off).
    #[test]
    fn test_build_action_registry_no_schema() {
        let dir = TempDir::new().unwrap();
        let registry = Project::build_action_registry(dir.path()).unwrap();
        assert!(
            registry.is_empty(),
            "Registry should be empty when no schema exists"
        );
    }

    /// Each variant of `enum Action` in the schema becomes a registry entry.
    #[test]
    fn test_build_action_registry_with_schema() {
        let dir = TempDir::new().unwrap();
        let schema_dir = dir.path().join("schema");
        fs::create_dir(&schema_dir).unwrap();
        fs::write(
            schema_dir.join("actions.sb"),
            "enum Action { walk, work, eat, sleep }",
        )
        .unwrap();
        let registry = Project::build_action_registry(dir.path()).unwrap();
        assert_eq!(registry.len(), 4);
        assert!(registry.contains("walk"));
        assert!(registry.contains("work"));
        assert!(registry.contains("eat"));
        assert!(registry.contains("sleep"));
        assert!(!registry.contains("unknown"));
    }

    /// When given a file path, the schema is looked up in the file's parent.
    #[test]
    fn test_build_action_registry_from_file_path() {
        let dir = TempDir::new().unwrap();
        let schema_dir = dir.path().join("schema");
        fs::create_dir(&schema_dir).unwrap();
        fs::write(schema_dir.join("actions.sb"), "enum Action { walk, work }").unwrap();
        // Create a test .sb file in the directory
        let test_file = dir.path().join("test.sb");
        fs::write(&test_file, "character Test { age: 30 }").unwrap();
        // Pass the file path - should look for schema in parent directory
        let registry = Project::build_action_registry(&test_file).unwrap();
        assert_eq!(registry.len(), 2);
        assert!(registry.contains("walk"));
        assert!(registry.contains("work"));
    }

    /// Only the enum literally named `Action` feeds the registry.
    #[test]
    fn test_build_action_registry_ignores_other_enums() {
        let dir = TempDir::new().unwrap();
        let schema_dir = dir.path().join("schema");
        fs::create_dir(&schema_dir).unwrap();
        fs::write(
            schema_dir.join("actions.sb"),
            r#"
enum Action { walk, work }
enum OtherEnum { foo, bar, baz }
"#,
        )
        .unwrap();
        let registry = Project::build_action_registry(dir.path()).unwrap();
        // Should only contain Action enum variants
        assert_eq!(registry.len(), 2);
        assert!(registry.contains("walk"));
        assert!(registry.contains("work"));
        assert!(!registry.contains("foo"));
    }
}

3
src/main.rs Normal file
View File

@@ -0,0 +1,3 @@
/// Placeholder binary entry point; the CLI proper lives in `src/bin/sb.rs`.
fn main() {
    let greeting = "Hello, world!";
    println!("{greeting}");
}

288
src/query.rs Normal file
View File

@@ -0,0 +1,288 @@
//! Query interface for filtering and searching entities
//!
//! This module provides convenient methods for querying entities in a storybook
//! project. You can filter by various criteria like traits, age ranges, field
//! values, etc.
use crate::{
syntax::ast::Value,
types::*,
};
/// Extension methods for querying characters
///
/// Implemented for any iterator over [`ResolvedCharacter`] references, so
/// filters chain fluently, e.g.
/// `project.characters().with_age_range(25, 40).with_field("job")`.
pub trait CharacterQuery<'a> {
    /// Filter characters by age range (inclusive on both ends).
    /// Characters without an integer `age` field are excluded.
    fn with_age_range(
        self,
        min: i64,
        max: i64,
    ) -> Box<dyn Iterator<Item = &'a ResolvedCharacter> + 'a>;
    /// Filter characters by trait value (inclusive on both ends).
    /// Characters without a float field named `trait_name` are excluded.
    fn with_trait(
        self,
        trait_name: &'a str,
        min: f64,
        max: f64,
    ) -> Box<dyn Iterator<Item = &'a ResolvedCharacter> + 'a>;
    /// Filter characters that have a specific field
    fn with_field(
        self,
        field_name: &'a str,
    ) -> Box<dyn Iterator<Item = &'a ResolvedCharacter> + 'a>;
    /// Filter characters by field value (exact equality)
    fn with_field_value(
        self,
        field_name: &'a str,
        value: Value,
    ) -> Box<dyn Iterator<Item = &'a ResolvedCharacter> + 'a>;
}
impl<'a, I> CharacterQuery<'a> for I
where
    I: Iterator<Item = &'a ResolvedCharacter> + 'a,
{
    /// Keep characters whose integer `age` field lies in `[min, max]`.
    fn with_age_range(
        self,
        min: i64,
        max: i64,
    ) -> Box<dyn Iterator<Item = &'a ResolvedCharacter> + 'a> {
        Box::new(self.filter(move |character| {
            matches!(
                character.fields.get("age"),
                Some(Value::Int(age)) if (min..=max).contains(age)
            )
        }))
    }
    /// Keep characters whose float field `trait_name` lies in `[min, max]`.
    fn with_trait(
        self,
        trait_name: &'a str,
        min: f64,
        max: f64,
    ) -> Box<dyn Iterator<Item = &'a ResolvedCharacter> + 'a> {
        Box::new(self.filter(move |character| {
            matches!(
                character.fields.get(trait_name),
                Some(Value::Float(v)) if (min..=max).contains(v)
            )
        }))
    }
    /// Keep characters that define the field `field_name`.
    fn with_field(
        self,
        field_name: &'a str,
    ) -> Box<dyn Iterator<Item = &'a ResolvedCharacter> + 'a> {
        Box::new(self.filter(move |character| character.fields.contains_key(field_name)))
    }
    /// Keep characters whose field `field_name` equals `value` exactly.
    fn with_field_value(
        self,
        field_name: &'a str,
        value: Value,
    ) -> Box<dyn Iterator<Item = &'a ResolvedCharacter> + 'a> {
        Box::new(self.filter(move |character| {
            matches!(character.fields.get(field_name), Some(v) if *v == value)
        }))
    }
}
/// Extension methods for querying relationships
///
/// Implemented for any iterator over [`ResolvedRelationship`] references.
pub trait RelationshipQuery<'a> {
    /// Filter relationships by bond strength (inclusive on both ends).
    /// Relationships without a float `bond` field are excluded.
    fn with_bond_range(
        self,
        min: f64,
        max: f64,
    ) -> Box<dyn Iterator<Item = &'a ResolvedRelationship> + 'a>;
    /// Filter relationships that include a specific participant
    /// (matched against the final segment of each participant's name).
    fn with_participant(
        self,
        participant_name: &'a str,
    ) -> Box<dyn Iterator<Item = &'a ResolvedRelationship> + 'a>;
    /// Filter relationships that have a specific field
    fn with_field(
        self,
        field_name: &'a str,
    ) -> Box<dyn Iterator<Item = &'a ResolvedRelationship> + 'a>;
}
impl<'a, I> RelationshipQuery<'a> for I
where
    I: Iterator<Item = &'a ResolvedRelationship> + 'a,
{
    /// Keep relationships whose float `bond` field lies in `[min, max]`.
    fn with_bond_range(
        self,
        min: f64,
        max: f64,
    ) -> Box<dyn Iterator<Item = &'a ResolvedRelationship> + 'a> {
        Box::new(self.filter(move |rel| {
            matches!(
                rel.fields.get("bond"),
                Some(Value::Float(bond)) if (min..=max).contains(bond)
            )
        }))
    }
    /// Keep relationships where some participant's qualified name ends in
    /// `participant_name`.
    fn with_participant(
        self,
        participant_name: &'a str,
    ) -> Box<dyn Iterator<Item = &'a ResolvedRelationship> + 'a> {
        Box::new(self.filter(move |rel| {
            rel.participants
                .iter()
                .any(|p| matches!(p.name.last(), Some(last) if last == participant_name))
        }))
    }
    /// Keep relationships that define the field `field_name`.
    fn with_field(
        self,
        field_name: &'a str,
    ) -> Box<dyn Iterator<Item = &'a ResolvedRelationship> + 'a> {
        Box::new(self.filter(move |rel| rel.fields.contains_key(field_name)))
    }
}
/// Extension methods for querying schedules
///
/// Implemented for any iterator over [`ResolvedSchedule`] references.
pub trait ScheduleQuery<'a> {
    /// Filter schedules that have at least one block with the given activity
    fn with_activity(
        self,
        activity: &'a str,
    ) -> Box<dyn Iterator<Item = &'a ResolvedSchedule> + 'a>;
}
impl<'a, I> ScheduleQuery<'a> for I
where
    I: Iterator<Item = &'a ResolvedSchedule> + 'a,
{
    /// Keep schedules containing at least one block for `activity`.
    fn with_activity(
        self,
        activity: &'a str,
    ) -> Box<dyn Iterator<Item = &'a ResolvedSchedule> + 'a> {
        Box::new(self.filter(move |schedule| {
            schedule
                .blocks
                .iter()
                .any(|block| block.activity == activity)
        }))
    }
}
#[cfg(test)]
mod tests {
    use std::collections::HashMap;
    use super::*;
    use crate::syntax::ast::Span;
    /// Build a minimal character carrying only `age` and `trust` fields.
    fn make_character(name: &str, age: i64, trust: f64) -> ResolvedCharacter {
        let mut fields = HashMap::new();
        fields.insert("age".to_string(), Value::Int(age));
        fields.insert("trust".to_string(), Value::Float(trust));
        ResolvedCharacter {
            name: name.to_string(),
            fields,
            prose_blocks: HashMap::new(),
            span: Span::new(0, 10),
        }
    }
    // Age filtering keeps only characters inside the inclusive range.
    #[test]
    fn test_filter_by_age_range() {
        let characters = [
            make_character("Alice", 25, 0.8),
            make_character("Bob", 35, 0.6),
            make_character("Charlie", 45, 0.9),
        ];
        let filtered: Vec<_> = characters.iter().with_age_range(30, 50).collect();
        assert_eq!(filtered.len(), 2);
        assert_eq!(filtered[0].name, "Bob");
        assert_eq!(filtered[1].name, "Charlie");
    }
    // Trait filtering selects characters whose named float falls in range.
    #[test]
    fn test_filter_by_trait() {
        let characters = [
            make_character("Alice", 25, 0.8),
            make_character("Bob", 35, 0.6),
            make_character("Charlie", 45, 0.9),
        ];
        let filtered: Vec<_> = characters.iter().with_trait("trust", 0.75, 1.0).collect();
        assert_eq!(filtered.len(), 2);
        assert_eq!(filtered[0].name, "Alice");
        assert_eq!(filtered[1].name, "Charlie");
    }
    // Query methods compose: the second filter applies to the first's output.
    #[test]
    fn test_chain_filters() {
        let characters = [
            make_character("Alice", 25, 0.8),
            make_character("Bob", 35, 0.6),
            make_character("Charlie", 45, 0.9),
            make_character("David", 40, 0.85),
        ];
        // Find characters aged 30-50 with trust > 0.8
        let filtered: Vec<_> = characters
            .iter()
            .with_age_range(30, 50)
            .with_trait("trust", 0.8, 1.0)
            .collect();
        assert_eq!(filtered.len(), 2);
        assert_eq!(filtered[0].name, "Charlie");
        assert_eq!(filtered[1].name, "David");
    }
    // Presence filtering: only the character that defines the field survives.
    #[test]
    fn test_filter_with_field() {
        let mut char1 = make_character("Alice", 25, 0.8);
        char1
            .fields
            .insert("job".to_string(), Value::String("baker".to_string()));
        let char2 = make_character("Bob", 35, 0.6);
        let characters = [char1, char2];
        let filtered: Vec<_> = characters.iter().with_field("job").collect();
        assert_eq!(filtered.len(), 1);
        assert_eq!(filtered[0].name, "Alice");
    }
    // Bond range filtering keeps relationships whose `bond` float is in range.
    #[test]
    fn test_relationship_with_bond_range() {
        let mut fields1 = HashMap::new();
        fields1.insert("bond".to_string(), Value::Float(0.9));
        let mut fields2 = HashMap::new();
        fields2.insert("bond".to_string(), Value::Float(0.5));
        let relationships = [
            ResolvedRelationship {
                name: "Strong".to_string(),
                participants: vec![],
                fields: fields1,
                span: Span::new(0, 10),
            },
            ResolvedRelationship {
                name: "Weak".to_string(),
                participants: vec![],
                fields: fields2,
                span: Span::new(0, 10),
            },
        ];
        let filtered: Vec<_> = relationships.iter().with_bond_range(0.8, 1.0).collect();
        assert_eq!(filtered.len(), 1);
        assert_eq!(filtered[0].name, "Strong");
    }
}

768
src/resolve/convert.rs Normal file
View File

@@ -0,0 +1,768 @@
//! Conversion from AST to resolved types
//!
//! This module handles converting parsed AST declarations into fully resolved
//! types that are ready for consumption by the game engine. It:
//! - Converts field vectors to HashMaps for efficient lookup
//! - Extracts prose blocks into separate collections
//! - Applies template overrides
//! - Validates that all references exist
use std::collections::HashMap;
use crate::{
resolve::{
merge,
names::NameTable,
validate,
ErrorCollector,
ResolveError,
Result,
},
syntax::ast::{
self,
ProseBlock,
Value,
},
types::*,
};
/// Convert a parsed file into resolved declarations.
///
/// Retained for backwards compatibility; it simply delegates to
/// [`convert_file_with_templates`], which performs full template
/// composition. Prefer calling that function directly in new code.
pub fn convert_file(file: &ast::File) -> Result<Vec<ResolvedDeclaration>> {
    convert_file_with_templates(file)
}
/// Convert a parsed file into resolved declarations with template composition
/// support.
///
/// Builds a [`NameTable`] for template lookups, then converts each
/// declaration in source order. `use` declarations are consumed during name
/// resolution and produce no resolved output here.
pub fn convert_file_with_templates(file: &ast::File) -> Result<Vec<ResolvedDeclaration>> {
    let name_table = NameTable::from_file(file)?;
    let declarations = &file.declarations;
    let mut resolved = Vec::new();
    for decl in declarations {
        // Every arm yields `Some(converted)` except `use`, which is skipped.
        let converted = match decl {
            | ast::Declaration::Character(c) => Some(ResolvedDeclaration::Character(
                convert_character_with_templates(c, declarations, &name_table)?,
            )),
            | ast::Declaration::Template(t) => Some(ResolvedDeclaration::Template(
                convert_template_with_includes(t, declarations, &name_table)?,
            )),
            | ast::Declaration::LifeArc(la) => {
                Some(ResolvedDeclaration::LifeArc(convert_life_arc(la)?))
            },
            | ast::Declaration::Schedule(s) => {
                Some(ResolvedDeclaration::Schedule(convert_schedule(s)?))
            },
            | ast::Declaration::Behavior(b) => {
                Some(ResolvedDeclaration::Behavior(convert_behavior(b)?))
            },
            | ast::Declaration::Institution(i) => {
                Some(ResolvedDeclaration::Institution(convert_institution(i)?))
            },
            | ast::Declaration::Relationship(r) => {
                Some(ResolvedDeclaration::Relationship(convert_relationship(r)?))
            },
            | ast::Declaration::Location(l) => {
                Some(ResolvedDeclaration::Location(convert_location(l)?))
            },
            | ast::Declaration::Species(s) => {
                Some(ResolvedDeclaration::Species(convert_species(s)?))
            },
            | ast::Declaration::Enum(e) => Some(ResolvedDeclaration::Enum(convert_enum(e)?)),
            // Use declarations are handled during name resolution, not
            // conversion.
            | ast::Declaration::Use(_) => None,
        };
        resolved.extend(converted);
    }
    Ok(resolved)
}
/// Convert a character AST node into a [`ResolvedCharacter`].
///
/// Splits the character's fields into plain values and prose blocks.
/// Template inheritance is NOT applied here; see
/// [`convert_character_with_templates`] for that.
pub fn convert_character(character: &ast::Character) -> Result<ResolvedCharacter> {
    let (fields, prose_blocks) = extract_fields_and_prose(&character.fields)?;
    let resolved = ResolvedCharacter {
        name: character.name.clone(),
        fields,
        prose_blocks,
        span: character.span.clone(),
    };
    Ok(resolved)
}
/// Convert character AST to resolved type with template composition.
///
/// Resolves template inheritance by:
/// 1. Merging all templates listed in `from Template1, Template2`
/// 2. Recursively resolving template includes
/// 3. Validating strict mode requirements
/// 4. Applying the character's own fields on top
pub fn convert_character_with_templates(
    character: &ast::Character,
    declarations: &[ast::Declaration],
    name_table: &NameTable,
) -> Result<ResolvedCharacter> {
    // Without a `from` clause the character's own fields are used verbatim.
    let merged_fields = match &character.template {
        | Some(_) => merge::merge_character_templates(character, declarations, name_table)?,
        | None => character.fields.clone(),
    };
    // Split the merged result into plain values and prose blocks.
    let (fields, prose_blocks) = extract_fields_and_prose(&merged_fields)?;
    Ok(ResolvedCharacter {
        name: character.name.clone(),
        fields,
        prose_blocks,
        span: character.span.clone(),
    })
}
/// Convert a template AST node into a [`ResolvedTemplate`].
///
/// The prose-block map is discarded: templates only carry plain field values.
pub fn convert_template(template: &ast::Template) -> Result<ResolvedTemplate> {
    let (fields, _prose) = extract_fields_and_prose(&template.fields)?;
    let resolved = ResolvedTemplate {
        name: template.name.clone(),
        fields,
        span: template.span.clone(),
    };
    Ok(resolved)
}
/// Convert template AST to resolved type with include resolution.
///
/// Resolves template includes by:
/// 1. Recursively resolving all included templates
/// 2. Merging included fields (later includes override earlier ones)
/// 3. Adding the template's own fields on top
pub fn convert_template_with_includes(
    template: &ast::Template,
    declarations: &[ast::Declaration],
    name_table: &NameTable,
) -> Result<ResolvedTemplate> {
    // A template without includes stands alone; otherwise walk the include
    // chain, threading a visited set through the recursive resolution.
    let merged_fields = if template.includes.is_empty() {
        template.fields.clone()
    } else {
        let mut visited = std::collections::HashSet::new();
        merge::resolve_template_includes(template, declarations, name_table, &mut visited)?
    };
    // Templates carry no prose blocks, so the prose map is dropped.
    let (fields, _prose) = extract_fields_and_prose(&merged_fields)?;
    Ok(ResolvedTemplate {
        name: template.name.clone(),
        fields,
        span: template.span.clone(),
    })
}
/// Convert a life arc AST node into a [`ResolvedLifeArc`].
pub fn convert_life_arc(life_arc: &ast::LifeArc) -> Result<ResolvedLifeArc> {
    // Copy each state (name + transitions) into its resolved counterpart.
    let mut states = Vec::with_capacity(life_arc.states.len());
    for state in &life_arc.states {
        states.push(ResolvedArcState {
            name: state.name.clone(),
            transitions: state.transitions.clone(),
            span: state.span.clone(),
        });
    }
    Ok(ResolvedLifeArc {
        name: life_arc.name.clone(),
        states,
        span: life_arc.span.clone(),
    })
}
/// Convert a schedule AST node into a [`ResolvedSchedule`].
pub fn convert_schedule(schedule: &ast::Schedule) -> Result<ResolvedSchedule> {
    // Copy every time block verbatim into its resolved counterpart.
    let mut blocks = Vec::with_capacity(schedule.blocks.len());
    for block in &schedule.blocks {
        blocks.push(ResolvedScheduleBlock {
            activity: block.activity.clone(),
            start: block.start.clone(),
            end: block.end.clone(),
            span: block.span.clone(),
        });
    }
    Ok(ResolvedSchedule {
        name: schedule.name.clone(),
        blocks,
        span: schedule.span.clone(),
    })
}
/// Convert a behavior tree AST node into a [`ResolvedBehavior`].
///
/// The behavior tree's root node is cloned as-is; no transformation is
/// performed on the tree structure.
pub fn convert_behavior(behavior: &ast::Behavior) -> Result<ResolvedBehavior> {
    let resolved = ResolvedBehavior {
        name: behavior.name.clone(),
        root: behavior.root.clone(),
        span: behavior.span.clone(),
    };
    Ok(resolved)
}
/// Convert an institution AST node into a [`ResolvedInstitution`].
///
/// The prose-block map is discarded: only plain field values are kept.
pub fn convert_institution(institution: &ast::Institution) -> Result<ResolvedInstitution> {
    let (fields, _prose) = extract_fields_and_prose(&institution.fields)?;
    let resolved = ResolvedInstitution {
        name: institution.name.clone(),
        fields,
        span: institution.span.clone(),
    };
    Ok(resolved)
}
/// Convert a relationship AST node into a [`ResolvedRelationship`].
///
/// Participants are carried over verbatim; the prose-block map is discarded.
pub fn convert_relationship(relationship: &ast::Relationship) -> Result<ResolvedRelationship> {
    let (fields, _prose) = extract_fields_and_prose(&relationship.fields)?;
    let resolved = ResolvedRelationship {
        name: relationship.name.clone(),
        participants: relationship.participants.clone(),
        fields,
        span: relationship.span.clone(),
    };
    Ok(resolved)
}
/// Convert a location AST node into a [`ResolvedLocation`].
///
/// The prose-block map is discarded: only plain field values are kept.
pub fn convert_location(location: &ast::Location) -> Result<ResolvedLocation> {
    let (fields, _prose) = extract_fields_and_prose(&location.fields)?;
    let resolved = ResolvedLocation {
        name: location.name.clone(),
        fields,
        span: location.span.clone(),
    };
    Ok(resolved)
}
/// Convert a species AST node into a [`ResolvedSpecies`].
///
/// The prose-block map is discarded: only plain field values are kept.
pub fn convert_species(species: &ast::Species) -> Result<ResolvedSpecies> {
    let (fields, _prose) = extract_fields_and_prose(&species.fields)?;
    let resolved = ResolvedSpecies {
        name: species.name.clone(),
        fields,
        span: species.span.clone(),
    };
    Ok(resolved)
}
/// Convert an enum AST node into a [`ResolvedEnum`].
///
/// Variant names are cloned in declaration order.
pub fn convert_enum(enum_decl: &ast::EnumDecl) -> Result<ResolvedEnum> {
    let resolved = ResolvedEnum {
        name: enum_decl.name.clone(),
        variants: enum_decl.variants.clone(),
        span: enum_decl.span.clone(),
    };
    Ok(resolved)
}
/// Extract fields and prose blocks from a field list
///
/// Splits a declaration's field vector into two maps keyed by field name:
/// plain values and prose blocks. Also validates that no field uses a
/// reserved keyword and that no field name appears more than once; all
/// violations are accumulated in an [`ErrorCollector`] so the caller sees
/// every problem in a single pass.
///
/// Returns `(fields_map, prose_blocks_map)` on success, or the accumulated
/// validation errors otherwise.
fn extract_fields_and_prose(
    fields: &[ast::Field],
) -> Result<(HashMap<String, Value>, HashMap<String, ProseBlock>)> {
    // Preallocate for the common case: most fields are plain values, and
    // every field lands in at most one of the two maps.
    let mut fields_map = HashMap::with_capacity(fields.len());
    let mut prose_map = HashMap::new();
    let mut collector = ErrorCollector::new();
    // Validate no reserved keywords
    validate::validate_no_reserved_keywords(fields, &mut collector);
    for field in fields {
        // Check for duplicate field names across BOTH maps, since a prose
        // block and a plain value may not share a name either.
        if fields_map.contains_key(&field.name) || prose_map.contains_key(&field.name) {
            collector.add(ResolveError::ValidationError {
                message: format!("Duplicate field name: '{}'", field.name),
                help: Some(format!(
                    "Each field name must be unique. The field '{}' appears more than once. Remove or rename the duplicate field.",
                    field.name
                )),
            });
            continue; // Continue collecting errors
        }
        // Route prose blocks and plain values to their respective maps.
        match &field.value {
            | Value::ProseBlock(prose) => {
                prose_map.insert(field.name.clone(), prose.clone());
            },
            | value => {
                fields_map.insert(field.name.clone(), value.clone());
            },
        }
    }
    // Succeeds only if no errors were accumulated above
    collector.into_result((fields_map, prose_map))
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::syntax::ast::{
        Character,
        EnumDecl,
        Field,
        Span,
    };
    // Plain fields are converted into the `fields` map untouched.
    #[test]
    fn test_convert_simple_character() {
        let character = Character {
            name: "Martha".to_string(),
            fields: vec![
                Field {
                    name: "age".to_string(),
                    value: Value::Int(34),
                    span: Span::new(0, 10),
                },
                Field {
                    name: "health".to_string(),
                    value: Value::Float(0.8),
                    span: Span::new(10, 20),
                },
            ],
            template: None,
            span: Span::new(0, 50),
        };
        let resolved = convert_character(&character).unwrap();
        assert_eq!(resolved.name, "Martha");
        assert_eq!(resolved.fields.len(), 2);
        assert_eq!(resolved.fields.get("age"), Some(&Value::Int(34)));
        assert_eq!(resolved.fields.get("health"), Some(&Value::Float(0.8)));
        assert_eq!(resolved.prose_blocks.len(), 0);
    }
    // Prose fields are routed to `prose_blocks`, not `fields`.
    #[test]
    fn test_convert_character_with_prose() {
        let prose_block = ProseBlock {
            tag: "backstory".to_string(),
            content: "Martha grew up in a small town.".to_string(),
            span: Span::new(10, 50),
        };
        let character = Character {
            name: "Martha".to_string(),
            fields: vec![
                Field {
                    name: "age".to_string(),
                    value: Value::Int(34),
                    span: Span::new(0, 10),
                },
                Field {
                    name: "backstory".to_string(),
                    value: Value::ProseBlock(prose_block.clone()),
                    span: Span::new(10, 50),
                },
            ],
            template: None,
            span: Span::new(0, 100),
        };
        let resolved = convert_character(&character).unwrap();
        assert_eq!(resolved.name, "Martha");
        assert_eq!(resolved.fields.len(), 1);
        assert_eq!(resolved.fields.get("age"), Some(&Value::Int(34)));
        assert_eq!(resolved.prose_blocks.len(), 1);
        assert_eq!(resolved.prose_blocks.get("backstory"), Some(&prose_block));
    }
    // A repeated field name is a validation error, not a silent overwrite.
    #[test]
    fn test_convert_character_duplicate_field_fails() {
        let character = Character {
            name: "Martha".to_string(),
            fields: vec![
                Field {
                    name: "age".to_string(),
                    value: Value::Int(34),
                    span: Span::new(0, 10),
                },
                Field {
                    name: "age".to_string(),
                    value: Value::Int(35),
                    span: Span::new(10, 20),
                },
            ],
            template: None,
            span: Span::new(0, 50),
        };
        let result = convert_character(&character);
        assert!(result.is_err());
    }
    // Enum variants survive conversion in declaration order.
    #[test]
    fn test_convert_enum() {
        let enum_decl = EnumDecl {
            name: "Status".to_string(),
            variants: vec!["active".to_string(), "inactive".to_string()],
            span: Span::new(0, 50),
        };
        let resolved = convert_enum(&enum_decl).unwrap();
        assert_eq!(resolved.name, "Status");
        assert_eq!(resolved.variants.len(), 2);
        assert_eq!(resolved.variants[0], "active");
        assert_eq!(resolved.variants[1], "inactive");
    }
    // Declarations of different kinds convert together, preserving order.
    #[test]
    fn test_convert_file_mixed_declarations() {
        let file = ast::File {
            declarations: vec![
                ast::Declaration::Character(Character {
                    name: "Martha".to_string(),
                    fields: vec![Field {
                        name: "age".to_string(),
                        value: Value::Int(34),
                        span: Span::new(0, 10),
                    }],
                    template: None,
                    span: Span::new(0, 50),
                }),
                ast::Declaration::Enum(EnumDecl {
                    name: "Status".to_string(),
                    variants: vec!["active".to_string()],
                    span: Span::new(50, 100),
                }),
            ],
        };
        let resolved = convert_file(&file).unwrap();
        assert_eq!(resolved.len(), 2);
        match &resolved[0] {
            | ResolvedDeclaration::Character(c) => assert_eq!(c.name, "Martha"),
            | _ => panic!("Expected Character"),
        }
        match &resolved[1] {
            | ResolvedDeclaration::Enum(e) => assert_eq!(e.name, "Status"),
            | _ => panic!("Expected Enum"),
        }
    }
    // `use` declarations produce no resolved output.
    #[test]
    fn test_convert_file_skips_use_declarations() {
        let file = ast::File {
            declarations: vec![
                ast::Declaration::Use(ast::UseDecl {
                    path: vec!["foo".to_string()],
                    kind: ast::UseKind::Wildcard,
                    span: Span::new(0, 10),
                }),
                ast::Declaration::Character(Character {
                    name: "Martha".to_string(),
                    fields: vec![],
                    template: None,
                    span: Span::new(10, 50),
                }),
            ],
        };
        let resolved = convert_file(&file).unwrap();
        // Should only have the character, not the use declaration
        assert_eq!(resolved.len(), 1);
        match &resolved[0] {
            | ResolvedDeclaration::Character(c) => assert_eq!(c.name, "Martha"),
            | _ => panic!("Expected Character"),
        }
    }
    // Degenerate input: an empty field list yields two empty maps.
    #[test]
    fn test_extract_fields_and_prose_empty() {
        let (fields, prose) = extract_fields_and_prose(&[]).unwrap();
        assert_eq!(fields.len(), 0);
        assert_eq!(prose.len(), 0);
    }
    // Mixed input is split: plain values in one map, prose in the other.
    #[test]
    fn test_extract_fields_and_prose_mixed() {
        let prose_block = ProseBlock {
            tag: "description".to_string(),
            content: "Test content".to_string(),
            span: Span::new(10, 30),
        };
        let fields = vec![
            Field {
                name: "age".to_string(),
                value: Value::Int(30),
                span: Span::new(0, 10),
            },
            Field {
                name: "description".to_string(),
                value: Value::ProseBlock(prose_block.clone()),
                span: Span::new(10, 30),
            },
            Field {
                name: "active".to_string(),
                value: Value::Bool(true),
                span: Span::new(30, 40),
            },
        ];
        let (field_map, prose_map) = extract_fields_and_prose(&fields).unwrap();
        assert_eq!(field_map.len(), 2);
        assert_eq!(field_map.get("age"), Some(&Value::Int(30)));
        assert_eq!(field_map.get("active"), Some(&Value::Bool(true)));
        assert_eq!(prose_map.len(), 1);
        assert_eq!(prose_map.get("description"), Some(&prose_block));
    }
    // ===== Template Composition Integration Tests =====
    // Template fields merge beneath the character's own fields.
    #[test]
    fn test_convert_character_with_single_template() {
        use crate::resolve::names::NameTable;
        let template = ast::Template {
            name: "Person".to_string(),
            fields: vec![Field {
                name: "type".to_string(), // Changed from "species"
                value: Value::String("human".to_string()),
                span: Span::new(0, 10),
            }],
            strict: false,
            includes: vec![],
            span: Span::new(0, 50),
        };
        let character = Character {
            name: "Martha".to_string(),
            fields: vec![Field {
                name: "age".to_string(),
                value: Value::Int(34),
                span: Span::new(0, 10),
            }],
            template: Some(vec!["Person".to_string()]),
            span: Span::new(0, 100),
        };
        let declarations = vec![
            ast::Declaration::Template(template),
            ast::Declaration::Character(character.clone()),
        ];
        let file = ast::File {
            declarations: declarations.clone(),
        };
        let name_table = NameTable::from_file(&file).unwrap();
        let resolved =
            convert_character_with_templates(&character, &declarations, &name_table).unwrap();
        assert_eq!(resolved.name, "Martha");
        assert_eq!(resolved.fields.len(), 2);
        assert_eq!(resolved.fields.get("age"), Some(&Value::Int(34)));
        assert_eq!(
            resolved.fields.get("type"),
            Some(&Value::String("human".to_string()))
        );
    }
    // Multiple `from` templates contribute fields side by side.
    #[test]
    fn test_convert_character_with_multiple_templates() {
        use crate::resolve::names::NameTable;
        let physical = ast::Template {
            name: "Physical".to_string(),
            fields: vec![Field {
                name: "height".to_string(),
                value: Value::Int(0),
                span: Span::new(0, 10),
            }],
            strict: false,
            includes: vec![],
            span: Span::new(0, 50),
        };
        let mental = ast::Template {
            name: "Mental".to_string(),
            fields: vec![Field {
                name: "iq".to_string(),
                value: Value::Int(0),
                span: Span::new(0, 10),
            }],
            strict: false,
            includes: vec![],
            span: Span::new(0, 50),
        };
        let character = Character {
            name: "Martha".to_string(),
            fields: vec![
                Field {
                    name: "height".to_string(),
                    value: Value::Int(165),
                    span: Span::new(0, 10),
                },
                Field {
                    name: "iq".to_string(),
                    value: Value::Int(120),
                    span: Span::new(10, 20),
                },
            ],
            template: Some(vec!["Physical".to_string(), "Mental".to_string()]),
            span: Span::new(0, 100),
        };
        let declarations = vec![
            ast::Declaration::Template(physical),
            ast::Declaration::Template(mental),
            ast::Declaration::Character(character.clone()),
        ];
        let file = ast::File {
            declarations: declarations.clone(),
        };
        let name_table = NameTable::from_file(&file).unwrap();
        let resolved =
            convert_character_with_templates(&character, &declarations, &name_table).unwrap();
        assert_eq!(resolved.name, "Martha");
        assert_eq!(resolved.fields.len(), 2);
        assert_eq!(resolved.fields.get("height"), Some(&Value::Int(165)));
        assert_eq!(resolved.fields.get("iq"), Some(&Value::Int(120)));
    }
    // Included templates contribute fields beneath the including template's.
    #[test]
    fn test_convert_template_with_includes() {
        use crate::resolve::names::NameTable;
        let base = ast::Template {
            name: "Human".to_string(),
            fields: vec![Field {
                name: "type".to_string(), // Changed from "species"
                value: Value::String("human".to_string()),
                span: Span::new(0, 10),
            }],
            strict: false,
            includes: vec![],
            span: Span::new(0, 50),
        };
        let derived = ast::Template {
            name: "Person".to_string(),
            fields: vec![Field {
                name: "age".to_string(),
                value: Value::Int(0),
                span: Span::new(0, 10),
            }],
            strict: false,
            includes: vec!["Human".to_string()],
            span: Span::new(0, 50),
        };
        let declarations = vec![
            ast::Declaration::Template(base),
            ast::Declaration::Template(derived.clone()),
        ];
        let file = ast::File {
            declarations: declarations.clone(),
        };
        let name_table = NameTable::from_file(&file).unwrap();
        let resolved =
            convert_template_with_includes(&derived, &declarations, &name_table).unwrap();
        assert_eq!(resolved.name, "Person");
        assert_eq!(resolved.fields.len(), 2);
        assert_eq!(resolved.fields.get("age"), Some(&Value::Int(0)));
        assert_eq!(
            resolved.fields.get("type"),
            Some(&Value::String("human".to_string()))
        );
    }
    // Reserved keywords used as field names are rejected with a helpful error.
    #[test]
    fn test_convert_character_reserved_keyword_fails() {
        let character = Character {
            name: "Martha".to_string(),
            fields: vec![Field {
                name: "species".to_string(), // Reserved keyword!
                value: Value::String("human".to_string()),
                span: Span::new(0, 10),
            }],
            template: None,
            span: Span::new(0, 50),
        };
        let result = convert_character(&character);
        assert!(result.is_err());
        if let Err(ResolveError::ValidationError { message, help }) = result {
            assert!(message.contains("reserved keyword"));
            assert!(message.contains("species"));
            assert!(help.is_some());
        } else {
            panic!("Expected ValidationError for reserved keyword");
        }
    }
    // A strict template's unresolved range field must fail conversion.
    #[test]
    fn test_convert_character_strict_mode_validation() {
        use crate::resolve::names::NameTable;
        let template = ast::Template {
            name: "Person".to_string(),
            fields: vec![Field {
                name: "age".to_string(),
                value: Value::Range(Box::new(Value::Int(18)), Box::new(Value::Int(65))),
                span: Span::new(0, 10),
            }],
            strict: true,
            includes: vec![],
            span: Span::new(0, 50),
        };
        let character = Character {
            name: "Martha".to_string(),
            fields: vec![], // No fields - inherits range from template
            template: Some(vec!["Person".to_string()]),
            span: Span::new(0, 100),
        };
        let declarations = vec![
            ast::Declaration::Template(template),
            ast::Declaration::Character(character.clone()),
        ];
        let file = ast::File {
            declarations: declarations.clone(),
        };
        let name_table = NameTable::from_file(&file).unwrap();
        let result = convert_character_with_templates(&character, &declarations, &name_table);
        assert!(result.is_err());
        if let Err(ResolveError::ValidationError { message, .. }) = result {
            assert!(message.contains("strict template"));
        }
    }
}

View File

@@ -0,0 +1,407 @@
//! Integration tests for the full conversion pipeline
use crate::{
resolve::convert::convert_file,
syntax::{
ast::*,
FileParser,
},
types::*,
};
/// Parse a source string and run it through the conversion pipeline.
fn parse_and_convert(source: &str) -> Result<Vec<ResolvedDeclaration>, Box<dyn std::error::Error>> {
    let lexer = crate::syntax::lexer::Lexer::new(source);
    let parsed = FileParser::new().parse(lexer)?;
    let resolved = convert_file(&parsed)?;
    Ok(resolved)
}
// Full pipeline: source text -> lexer -> parser -> conversion, plain fields.
#[test]
fn test_simple_character_end_to_end() {
    let source = r#"
        character Martha {
            age: 34
            health: 0.8
        }
    "#;
    let resolved = parse_and_convert(source).unwrap();
    assert_eq!(resolved.len(), 1);
    match &resolved[0] {
        | ResolvedDeclaration::Character(c) => {
            assert_eq!(c.name, "Martha");
            assert_eq!(c.fields.len(), 2);
            assert_eq!(c.fields.get("age"), Some(&Value::Int(34)));
            assert_eq!(c.fields.get("health"), Some(&Value::Float(0.8)));
        },
        | _ => panic!("Expected Character"),
    }
}
// Prose blocks parsed from `---tag ... ---` syntax land in `prose_blocks`.
#[test]
fn test_character_with_prose_end_to_end() {
    let source = r#"
        character Martha {
            age: 34
            backstory: ---backstory
            Martha grew up in a small town.
            She loved baking from a young age.
            ---
        }
    "#;
    let resolved = parse_and_convert(source).unwrap();
    assert_eq!(resolved.len(), 1);
    match &resolved[0] {
        | ResolvedDeclaration::Character(c) => {
            assert_eq!(c.name, "Martha");
            assert_eq!(c.fields.len(), 1);
            assert_eq!(c.fields.get("age"), Some(&Value::Int(34)));
            assert_eq!(c.prose_blocks.len(), 1);
            let backstory = c.prose_blocks.get("backstory").unwrap();
            assert_eq!(backstory.tag, "backstory");
            assert!(backstory.content.contains("Martha grew up"));
        },
        | _ => panic!("Expected Character"),
    }
}
// Several declarations of mixed kinds all survive the pipeline.
#[test]
fn test_multiple_declarations_end_to_end() {
    let source = r#"
        character Martha {
            age: 34
        }
        character David {
            age: 36
        }
        enum Status {
            active, inactive, pending
        }
    "#;
    let resolved = parse_and_convert(source).unwrap();
    assert_eq!(resolved.len(), 3);
    let char_count = resolved
        .iter()
        .filter(|d| matches!(d, ResolvedDeclaration::Character(_)))
        .count();
    let enum_count = resolved
        .iter()
        .filter(|d| matches!(d, ResolvedDeclaration::Enum(_)))
        .count();
    assert_eq!(char_count, 2);
    assert_eq!(enum_count, 1);
}
// Relationship participants and plain fields both reach the resolved type.
#[test]
fn test_relationship_end_to_end() {
    let source = r#"
        relationship Spousal {
            Martha
            David
            bond: 0.9
        }
    "#;
    let resolved = parse_and_convert(source).unwrap();
    assert_eq!(resolved.len(), 1);
    match &resolved[0] {
        | ResolvedDeclaration::Relationship(r) => {
            assert_eq!(r.name, "Spousal");
            assert_eq!(r.participants.len(), 2);
            assert_eq!(r.fields.get("bond"), Some(&Value::Float(0.9)));
        },
        | _ => panic!("Expected Relationship"),
    }
}
// Life arc states (including an empty terminal state) keep their order.
#[test]
fn test_life_arc_end_to_end() {
    let source = r#"
        life_arc Growth {
            state child {
                on age > 12 -> teen
            }
            state teen {
                on age > 18 -> adult
            }
            state adult {}
        }
    "#;
    let resolved = parse_and_convert(source).unwrap();
    assert_eq!(resolved.len(), 1);
    match &resolved[0] {
        | ResolvedDeclaration::LifeArc(la) => {
            assert_eq!(la.name, "Growth");
            assert_eq!(la.states.len(), 3);
            assert_eq!(la.states[0].name, "child");
            assert_eq!(la.states[1].name, "teen");
            assert_eq!(la.states[2].name, "adult");
        },
        | _ => panic!("Expected LifeArc"),
    }
}
// The `>` block syntax parses into a Sequence behavior-tree root.
#[test]
fn test_behavior_tree_end_to_end() {
    let source = r#"
        behavior WorkAtBakery {
            > {
                walk
                work(duration: 8h)
                rest
            }
        }
    "#;
    let resolved = parse_and_convert(source).unwrap();
    assert_eq!(resolved.len(), 1);
    match &resolved[0] {
        | ResolvedDeclaration::Behavior(b) => {
            assert_eq!(b.name, "WorkAtBakery");
            // Root should be a Sequence node
            assert!(matches!(b.root, BehaviorNode::Sequence(_)));
        },
        | _ => panic!("Expected Behavior"),
    }
}
// Schedule time blocks keep both their activity names and their order.
#[test]
fn test_schedule_end_to_end() {
    let source = r#"
        schedule DailyRoutine {
            08:00 -> 12:00: work
            12:00 -> 13:00: lunch
            13:00 -> 17:00: work
        }
    "#;
    let resolved = parse_and_convert(source).unwrap();
    assert_eq!(resolved.len(), 1);
    match &resolved[0] {
        | ResolvedDeclaration::Schedule(s) => {
            assert_eq!(s.name, "DailyRoutine");
            assert_eq!(s.blocks.len(), 3);
            assert_eq!(s.blocks[0].activity, "work");
            assert_eq!(s.blocks[1].activity, "lunch");
            assert_eq!(s.blocks[2].activity, "work");
        },
        | _ => panic!("Expected Schedule"),
    }
}
// Institution declarations resolve with their plain integer fields.
#[test]
fn test_institution_end_to_end() {
    let source = r#"
        institution Bakery {
            employees: 5
            revenue: 50000
        }
    "#;
    let resolved = parse_and_convert(source).unwrap();
    assert_eq!(resolved.len(), 1);
    match &resolved[0] {
        | ResolvedDeclaration::Institution(i) => {
            assert_eq!(i.name, "Bakery");
            assert_eq!(i.fields.get("employees"), Some(&Value::Int(5)));
            assert_eq!(i.fields.get("revenue"), Some(&Value::Int(50000)));
        },
        | _ => panic!("Expected Institution"),
    }
}
// Location declarations resolve with their coordinate fields.
#[test]
fn test_location_end_to_end() {
    let source = r#"
        location Bakery {
            x: 100
            y: 200
        }
    "#;
    let resolved = parse_and_convert(source).unwrap();
    assert_eq!(resolved.len(), 1);
    match &resolved[0] {
        | ResolvedDeclaration::Location(l) => {
            assert_eq!(l.name, "Bakery");
            assert_eq!(l.fields.get("x"), Some(&Value::Int(100)));
            assert_eq!(l.fields.get("y"), Some(&Value::Int(200)));
        },
        | _ => panic!("Expected Location"),
    }
}
// Species declarations resolve with mixed int and float fields.
#[test]
fn test_species_end_to_end() {
    let source = r#"
        species Human {
            lifespan: 80
            intelligence: 0.9
        }
    "#;
    let resolved = parse_and_convert(source).unwrap();
    assert_eq!(resolved.len(), 1);
    match &resolved[0] {
        | ResolvedDeclaration::Species(s) => {
            assert_eq!(s.name, "Human");
            assert_eq!(s.fields.get("lifespan"), Some(&Value::Int(80)));
            assert_eq!(s.fields.get("intelligence"), Some(&Value::Float(0.9)));
        },
        | _ => panic!("Expected Species"),
    }
}
// Templates keep their unresolved range values after conversion.
#[test]
fn test_template_end_to_end() {
    let source = r#"
        template Adult {
            age: 20..60
            health: 0.5..1.0
        }
    "#;
    let resolved = parse_and_convert(source).unwrap();
    assert_eq!(resolved.len(), 1);
    match &resolved[0] {
        | ResolvedDeclaration::Template(t) => {
            assert_eq!(t.name, "Adult");
            assert_eq!(t.fields.len(), 2);
            // Templates keep their range values
            assert!(matches!(t.fields.get("age"), Some(Value::Range(_, _))));
        },
        | _ => panic!("Expected Template"),
    }
}
// `use` declarations are consumed by name resolution, not converted.
#[test]
fn test_use_declarations_are_skipped() {
    let source = r#"
        use characters::*;
        character Martha {
            age: 34
        }
    "#;
    let resolved = parse_and_convert(source).unwrap();
    // Should only have character, not use declaration
    assert_eq!(resolved.len(), 1);
    assert!(matches!(resolved[0], ResolvedDeclaration::Character(_)));
}
// A realistic file mixing every declaration kind converts as a whole,
// with the `use` line excluded from the resolved output.
#[test]
fn test_complex_mixed_file() {
    let source = r#"
        use relationships::*;
        character Martha {
            age: 34
            backstory: ---backstory
            Martha grew up in a small town.
            ---
        }
        character David {
            age: 36
        }
        relationship Spousal {
            Martha
            David
            bond: 0.9
        }
        enum BondType {
            romantic, familial, friendship
        }
        schedule DailyRoutine {
            08:00 -> 12:00: work
            12:00 -> 13:00: lunch
        }
    "#;
    let resolved = parse_and_convert(source).unwrap();
    // Count each type
    let chars = resolved
        .iter()
        .filter(|d| matches!(d, ResolvedDeclaration::Character(_)))
        .count();
    let rels = resolved
        .iter()
        .filter(|d| matches!(d, ResolvedDeclaration::Relationship(_)))
        .count();
    let enums = resolved
        .iter()
        .filter(|d| matches!(d, ResolvedDeclaration::Enum(_)))
        .count();
    let scheds = resolved
        .iter()
        .filter(|d| matches!(d, ResolvedDeclaration::Schedule(_)))
        .count();
    assert_eq!(chars, 2);
    assert_eq!(rels, 1);
    assert_eq!(enums, 1);
    assert_eq!(scheds, 1);
    assert_eq!(resolved.len(), 5); // Total, excluding use declaration
}
// Duplicate field names are rejected during conversion, not silently merged.
#[test]
fn test_duplicate_field_names_error() {
    let source = r#"
        character Martha {
            age: 34
            age: 35
        }
    "#;
    let result = parse_and_convert(source);
    assert!(result.is_err(), "Duplicate field names should cause error");
}
// Every scalar value type (int, float, bool, string) round-trips intact.
#[test]
fn test_all_value_types_convert() {
    let source = r#"
        character Test {
            int_val: 42
            float_val: 3.5
            bool_val: true
            string_val: "hello"
        }
    "#;
    let resolved = parse_and_convert(source).unwrap();
    match &resolved[0] {
        | ResolvedDeclaration::Character(c) => {
            assert_eq!(c.fields.get("int_val"), Some(&Value::Int(42)));
            assert_eq!(c.fields.get("float_val"), Some(&Value::Float(3.5)));
            assert_eq!(c.fields.get("bool_val"), Some(&Value::Bool(true)));
            assert_eq!(
                c.fields.get("string_val"),
                Some(&Value::String("hello".to_string()))
            );
        },
        | _ => panic!("Expected Character"),
    }
}

View File

@@ -0,0 +1,371 @@
//! Property tests for AST to resolved type conversion
use proptest::prelude::*;
use crate::{
resolve::convert::{
convert_character,
convert_enum,
convert_file,
},
syntax::ast::*,
};
// ===== Generators =====
// Reserved keywords that cannot be used as field names.
// Generated identifiers are filtered against this list so property inputs
// stay valid. NOTE(review): presumably mirrors the validator's own reserved
// list — confirm the two stay in sync when keywords are added.
const RESERVED_KEYWORDS: &[&str] = &[
    "character",
    "template",
    "life_arc",
    "schedule",
    "behavior",
    "institution",
    "relationship",
    "location",
    "species",
    "enum",
    "use",
    "state",
    "on",
    "as",
    "remove",
    "append",
    "strict",
    "include",
    "from",
    "self",
    "other",
    "forall",
    "exists",
    "in",
    "where",
    "and",
    "or",
    "not",
    "is",
    "true",
    "false",
];
/// Generate a syntactically valid, non-reserved identifier (1-16 chars).
fn valid_ident() -> impl Strategy<Value = String> {
    "[a-zA-Z_][a-zA-Z0-9_]{0,15}".prop_filter("not reserved", |candidate| {
        !RESERVED_KEYWORDS.contains(&candidate.as_str())
    })
}
// Generate an arbitrary scalar `Value`: a bounded int, a finite float, a
// bool, or a short ASCII string. Prose blocks and ranges are deliberately
// excluded so the generated characters convert without template machinery.
fn valid_value() -> impl Strategy<Value = Value> {
    prop_oneof![
        (-1000i64..1000).prop_map(Value::Int),
        (-1000.0..1000.0)
            .prop_filter("finite", |f: &f64| f.is_finite())
            .prop_map(Value::Float),
        any::<bool>().prop_map(Value::Bool),
        "[a-zA-Z0-9 ]{0,30}".prop_map(Value::String),
    ]
}
// Generate a single field with a valid identifier name, a scalar value,
// and a fixed placeholder span.
fn valid_field() -> impl Strategy<Value = Field> {
    (valid_ident(), valid_value()).prop_map(|(name, value)| Field {
        name,
        value,
        span: Span::new(0, 10),
    })
}
// Generate up to 10 fields, then drop later duplicates so every field name
// is unique (conversion rejects duplicate names, which would otherwise make
// the property vacuously fail).
fn valid_unique_fields() -> impl Strategy<Value = Vec<Field>> {
    prop::collection::vec(valid_field(), 0..10).prop_map(|fields| {
        let mut unique_fields = Vec::new();
        let mut seen_names = std::collections::HashSet::new();
        for field in fields {
            // First occurrence of a name wins; later duplicates are dropped.
            if seen_names.insert(field.name.clone()) {
                unique_fields.push(field);
            }
        }
        unique_fields
    })
}
// Generate a template-free character with unique field names and a fixed
// placeholder span.
fn valid_character() -> impl Strategy<Value = Character> {
    (valid_ident(), valid_unique_fields()).prop_map(|(name, fields)| Character {
        name,
        fields,
        template: None,
        span: Span::new(0, 100),
    })
}
// Generate an enum declaration with 1-9 identifier variants (variant names
// may repeat; conversion does not deduplicate variants).
fn valid_enum() -> impl Strategy<Value = EnumDecl> {
    (valid_ident(), prop::collection::vec(valid_ident(), 1..10)).prop_map(|(name, variants)| {
        EnumDecl {
            name,
            variants,
            span: Span::new(0, 100),
        }
    })
}
// ===== Property Tests =====
proptest! {
    // Conversion must not rename a character.
    #[test]
    fn test_character_name_preserved(character in valid_character()) {
        let original_name = character.name.clone();
        let resolved = convert_character(&character).unwrap();
        assert_eq!(resolved.name, original_name);
    }
    // Every input field ends up in exactly one of the two output maps
    // (plain fields vs prose blocks), so the totals must match.
    #[test]
    fn test_character_field_count_preserved(character in valid_character()) {
        let original_count = character.fields.len();
        let resolved = convert_character(&character).unwrap();
        let total_count = resolved.fields.len() + resolved.prose_blocks.len();
        assert_eq!(total_count, original_count);
    }
    // Field values survive conversion: prose blocks are routed to
    // `prose_blocks`, everything else to `fields` under the same name.
    #[test]
    fn test_character_field_values_preserved(character in valid_character()) {
        let resolved = convert_character(&character).unwrap();
        for field in &character.fields {
            match &field.value {
                | Value::ProseBlock(_) => {
                    assert!(resolved.prose_blocks.contains_key(&field.name));
                },
                | value => {
                    assert_eq!(resolved.fields.get(&field.name), Some(value));
                },
            }
        }
    }
    // Conversion must not rename an enum.
    #[test]
    fn test_enum_name_preserved(enum_decl in valid_enum()) {
        let original_name = enum_decl.name.clone();
        let resolved = convert_enum(&enum_decl).unwrap();
        assert_eq!(resolved.name, original_name);
    }
    // Variants are preserved in both count and order.
    #[test]
    fn test_enum_variants_preserved(enum_decl in valid_enum()) {
        let resolved = convert_enum(&enum_decl).unwrap();
        assert_eq!(resolved.variants.len(), enum_decl.variants.len());
        for (i, variant) in enum_decl.variants.iter().enumerate() {
            assert_eq!(&resolved.variants[i], variant);
        }
    }
    // Converting a file yields one resolved declaration per input
    // declaration (after deduplicating names among the generated inputs).
    #[test]
    fn test_convert_file_preserves_declaration_count(
        characters in prop::collection::vec(valid_character(), 0..5),
        enums in prop::collection::vec(valid_enum(), 0..5)
    ) {
        // Ensure unique names across all declarations to avoid duplicate definition errors
        let mut seen_names = std::collections::HashSet::new();
        let mut declarations = Vec::new();
        for char in characters {
            if seen_names.insert(char.name.clone()) {
                declarations.push(Declaration::Character(char));
            }
        }
        for enum_decl in enums {
            if seen_names.insert(enum_decl.name.clone()) {
                declarations.push(Declaration::Enum(enum_decl));
            }
        }
        let file = File { declarations: declarations.clone() };
        let resolved = convert_file(&file).unwrap();
        // Should have same count (excluding Use declarations)
        assert_eq!(resolved.len(), declarations.len());
    }
    // Two fields with the same name on one character is an error, whatever
    // the values are.
    #[test]
    fn test_duplicate_field_names_rejected(
        name in valid_ident(),
        field_name in valid_ident(),
        val1 in valid_value(),
        val2 in valid_value()
    ) {
        let character = Character {
            name,
            fields: vec![
                Field {
                    name: field_name.clone(),
                    value: val1,
                    span: Span::new(0, 10),
                },
                Field {
                    name: field_name,
                    value: val2,
                    span: Span::new(10, 20),
                },
            ],
            template: None,
            span: Span::new(0, 50),
        };
        let result = convert_character(&character);
        assert!(result.is_err(), "Duplicate field names should be rejected");
    }
    // Every field must be reachable by keyed lookup in the appropriate map.
    #[test]
    fn test_field_lookup_is_efficient(character in valid_character()) {
        let resolved = convert_character(&character).unwrap();
        // All fields should be directly accessible in O(1)
        for field in &character.fields {
            if matches!(field.value, Value::ProseBlock(_)) {
                assert!(
                    resolved.prose_blocks.contains_key(&field.name),
                    "Prose block {} should be in map",
                    field.name
                );
            } else {
                assert!(
                    resolved.fields.contains_key(&field.name),
                    "Field {} should be in map",
                    field.name
                );
            }
        }
    }
    // A character with no fields converts to an empty resolved character.
    #[test]
    fn test_empty_character_converts(name in valid_ident()) {
        let character = Character {
            name: name.clone(),
            fields: vec![],
            template: None,
            span: Span::new(0, 10),
        };
        let resolved = convert_character(&character).unwrap();
        assert_eq!(resolved.name, name);
        assert_eq!(resolved.fields.len(), 0);
        assert_eq!(resolved.prose_blocks.len(), 0);
    }
    // Running the same conversion twice yields equal names, sizes, and
    // field values.
    #[test]
    fn test_conversion_is_deterministic(character in valid_character()) {
        let resolved1 = convert_character(&character).unwrap();
        let resolved2 = convert_character(&character).unwrap();
        assert_eq!(resolved1.name, resolved2.name);
        assert_eq!(resolved1.fields.len(), resolved2.fields.len());
        assert_eq!(resolved1.prose_blocks.len(), resolved2.prose_blocks.len());
        // All fields should match
        for (key, value) in &resolved1.fields {
            assert_eq!(resolved2.fields.get(key), Some(value));
        }
    }
    // `use` declarations are import directives, not entities: they are
    // skipped by conversion and must not appear in the output.
    #[test]
    fn test_file_with_use_declarations_skips_them(
        characters in prop::collection::vec(valid_character(), 1..5),
        use_count in 0usize..5
    ) {
        let mut declarations = vec![];
        // Add some use declarations
        for i in 0..use_count {
            declarations.push(Declaration::Use(UseDecl {
                path: vec![format!("module{}", i)],
                kind: UseKind::Wildcard,
                span: Span::new(0, 10),
            }));
        }
        // Add characters
        let char_count = characters.len();
        declarations.extend(characters.into_iter().map(Declaration::Character));
        let file = File { declarations };
        let resolved = convert_file(&file).unwrap();
        // Should only have characters, not use declarations
        assert_eq!(resolved.len(), char_count);
    }
}
#[cfg(test)]
mod edge_cases {
    use super::*;
    proptest! {
        // One field of each scalar value type round-trips through
        // conversion unchanged.
        #[test]
        fn test_all_value_types_convert(
            int_val in -1000i64..1000,
            float_val in -1000.0..1000.0,
            bool_val in any::<bool>(),
            string_val in "[a-zA-Z0-9 ]{1,30}"
        ) {
            let character = Character {
                name: "Test".to_string(),
                fields: vec![
                    Field {
                        name: "int_field".to_string(),
                        value: Value::Int(int_val),
                        span: Span::new(0, 10),
                    },
                    Field {
                        name: "float_field".to_string(),
                        value: Value::Float(float_val),
                        span: Span::new(10, 20),
                    },
                    Field {
                        name: "bool_field".to_string(),
                        value: Value::Bool(bool_val),
                        span: Span::new(20, 30),
                    },
                    Field {
                        name: "string_field".to_string(),
                        value: Value::String(string_val.clone()),
                        span: Span::new(30, 40),
                    },
                ],
                template: None,
                span: Span::new(0, 50),
            };
            let resolved = convert_character(&character).unwrap();
            assert_eq!(resolved.fields.get("int_field"), Some(&Value::Int(int_val)));
            assert_eq!(resolved.fields.get("float_field"), Some(&Value::Float(float_val)));
            assert_eq!(resolved.fields.get("bool_field"), Some(&Value::Bool(bool_val)));
            assert_eq!(resolved.fields.get("string_field"), Some(&Value::String(string_val)));
        }
        // Latin-1 supplement characters (U+0080..U+00FF) in names and
        // string values must survive conversion byte-for-byte.
        #[test]
        fn test_unicode_in_names_and_values(
            name in "[a-zA-Z_\u{0080}-\u{00FF}]{1,20}",
            field_name in "[a-zA-Z_\u{0080}-\u{00FF}]{1,20}".prop_filter("not reserved", |s| {
                !RESERVED_KEYWORDS.contains(&s.as_str())
            }),
            string_val in "[a-zA-Z0-9 \u{0080}-\u{00FF}]{0,30}"
        ) {
            let character = Character {
                name: name.clone(),
                fields: vec![Field {
                    name: field_name.clone(),
                    value: Value::String(string_val.clone()),
                    span: Span::new(0, 10),
                }],
                template: None,
                span: Span::new(0, 50),
            };
            let resolved = convert_character(&character).unwrap();
            assert_eq!(resolved.name, name);
            assert_eq!(
                resolved.fields.get(&field_name),
                Some(&Value::String(string_val))
            );
        }
    }
}

View File

@@ -0,0 +1,164 @@
//! Integration tests for the resolution engine
use crate::{
resolve::names::{
DeclKind,
NameTable,
},
syntax::{
lexer::Lexer,
FileParser,
},
};
/// Lex and parse `source` into an AST `File`, panicking on parse failure
/// (these tests only feed in known-good input).
fn parse(source: &str) -> crate::syntax::ast::File {
    FileParser::new()
        .parse(Lexer::new(source))
        .expect("Should parse successfully")
}
#[test]
// A file with two characters, a template, and an enum: all four names are
// registered and kind-filtered iteration reports the right counts.
fn test_name_resolution_example_file() {
    let source = r#"
character Alice {
age: 30
}
character Bob {
age: 35
}
template PersonTemplate {
age: 18..80
}
enum Status {
active,
inactive
}
"#;
    let file = parse(source);
    let table = NameTable::from_file(&file).expect("Should build name table");
    // Verify all names are registered
    assert!(table.lookup(&["Alice".to_string()]).is_some());
    assert!(table.lookup(&["Bob".to_string()]).is_some());
    assert!(table.lookup(&["PersonTemplate".to_string()]).is_some());
    assert!(table.lookup(&["Status".to_string()]).is_some());
    // Verify kind filtering
    assert_eq!(table.entries_of_kind(DeclKind::Character).count(), 2);
    assert_eq!(table.entries_of_kind(DeclKind::Template).count(), 1);
    assert_eq!(table.entries_of_kind(DeclKind::Enum).count(), 1);
}
#[test]
// All three `use` forms (single item, braced list, wildcard) are collected
// as imports, while the local character still resolves normally.
fn test_use_statements_are_parsed() {
    let source = r#"
use characters::Martha;
use templates::{Person, NPC};
use locations::*;
character LocalChar {
age: 25
}
"#;
    let file = parse(source);
    let table = NameTable::from_file(&file).expect("Should build name table");
    // Verify imports were collected
    assert_eq!(table.imports().len(), 3);
    // Verify local declaration is registered
    assert!(table.lookup(&["LocalChar".to_string()]).is_some());
}
#[test]
// Two declarations sharing the name "Martha" must be rejected when the
// name table is built, rather than silently shadowing one another.
fn test_duplicate_name_error() {
    let source = r#"
character Martha {
age: 30
}
character Martha {
age: 35
}
"#;
    let duplicate_result = NameTable::from_file(&parse(source));
    assert!(duplicate_result.is_err());
}
#[test]
// Near-miss lookups produce a "did you mean" suggestion for both a
// trailing-character typo and a dropped-letter typo.
fn test_fuzzy_matching_suggestion() {
    let source = r#"
character Elizabeth {
age: 30
}
"#;
    let file = parse(source);
    let table = NameTable::from_file(&file).expect("Should build name table");
    // Typo "Elizabet" should suggest "Elizabeth"
    let suggestion = table.find_suggestion("Elizabet");
    assert_eq!(suggestion, Some("Elizabeth".to_string()));
    // Typo "Elizabth" should also suggest "Elizabeth"
    let suggestion = table.find_suggestion("Elizabth");
    assert_eq!(suggestion, Some("Elizabeth".to_string()));
}
#[test]
// One declaration of every DSL kind: each must be registered in the name
// table under its own `DeclKind`, exactly once.
fn test_all_declaration_kinds() {
    let source = r#"
character C { age: 1 }
template T { age: 1..2 }
life_arc L {
state s {}
}
schedule S {
10:00 -> 11:00: activity
}
behavior B {
action
}
institution I {
name: "Test"
}
relationship R {
C
C
}
location Loc {
name: "Place"
}
species Sp {
lifespan: 100
}
enum E {
a,
b
}
"#;
    let file = parse(source);
    let table = NameTable::from_file(&file).expect("Should build name table");
    // All 10 declaration kinds should be represented
    assert_eq!(table.entries_of_kind(DeclKind::Character).count(), 1);
    assert_eq!(table.entries_of_kind(DeclKind::Template).count(), 1);
    assert_eq!(table.entries_of_kind(DeclKind::LifeArc).count(), 1);
    assert_eq!(table.entries_of_kind(DeclKind::Schedule).count(), 1);
    assert_eq!(table.entries_of_kind(DeclKind::Behavior).count(), 1);
    assert_eq!(table.entries_of_kind(DeclKind::Institution).count(), 1);
    assert_eq!(table.entries_of_kind(DeclKind::Relationship).count(), 1);
    assert_eq!(table.entries_of_kind(DeclKind::Location).count(), 1);
    assert_eq!(table.entries_of_kind(DeclKind::Species).count(), 1);
    assert_eq!(table.entries_of_kind(DeclKind::Enum).count(), 1);
}

325
src/resolve/links.rs Normal file
View File

@@ -0,0 +1,325 @@
//! Bidirectional relationship resolution
//!
//! Handles relationships that can be declared from either participant's
//! perspective, merging self/other blocks and validating consistency.
use std::collections::HashMap;
use crate::{
resolve::{
ResolveError,
Result,
},
syntax::ast::{
Declaration,
Field,
File,
Participant,
Relationship,
},
};
/// A relationship key that's order-independent
/// (Martha, David) and (David, Martha) map to the same key
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct RelationshipKey {
    /// Participant names, kept sorted so construction order never matters.
    participants: Vec<String>,
    /// The relationship's name (e.g. "Marriage").
    name: String,
}
impl RelationshipKey {
    /// Build a key for `name` over `participants`; the order participants
    /// are listed in is irrelevant.
    fn new(mut participants: Vec<String>, name: String) -> Self {
        // Sort participants to make key order-independent. `sort_unstable`
        // is sufficient: equal `String`s are indistinguishable, and the
        // unstable sort avoids the allocation a stable sort may perform.
        participants.sort_unstable();
        Self { participants, name }
    }
}
/// Information about a relationship declaration
#[derive(Debug, Clone)]
struct RelationshipDecl {
    // The raw AST relationship as parsed.
    relationship: Relationship,
    /// Which participant is "self" (index into participants).
    /// `None` when no participant declares a self/other block.
    self_index: Option<usize>,
}
/// Resolved bidirectional relationship
///
/// The result of merging every declaration of the same relationship
/// (identified by its order-independent key) into one record.
#[derive(Debug, Clone)]
pub struct ResolvedRelationship {
    pub name: String,
    // Participants as listed by the first declaration encountered.
    pub participants: Vec<Participant>,
    // Shared fields (declared outside any self/other block), merged across
    // all declarations.
    pub fields: Vec<Field>,
    /// Merged self/other blocks for each participant
    pub participant_fields: Vec<ParticipantFields>,
}
// Per-participant view of a resolved relationship: the participant's
// identity plus the merged contents of its self/other blocks.
#[derive(Debug, Clone)]
pub struct ParticipantFields {
    // Path-style participant name (segments joined with "::" elsewhere).
    pub participant_name: Vec<String>,
    // Optional role label (e.g. "spouse").
    pub role: Option<String>,
    /// Fields from this participant's "self" block
    pub self_fields: Vec<Field>,
    /// Fields from this participant's "other" block (about other participants)
    pub other_fields: Vec<Field>,
}
/// Resolve bidirectional relationships in a file
///
/// Groups every `relationship` declaration by its order-independent key
/// (sorted participant names + relationship name) and merges each group
/// into a single `ResolvedRelationship`. Non-relationship declarations are
/// ignored. Returns the first merge error encountered, if any.
pub fn resolve_relationships(file: &File) -> Result<Vec<ResolvedRelationship>> {
    let mut groups: HashMap<RelationshipKey, Vec<RelationshipDecl>> = HashMap::new();
    for decl in &file.declarations {
        let rel = match decl {
            Declaration::Relationship(rel) => rel,
            _ => continue,
        };
        // Key on the joined participant paths plus the relationship name.
        let participant_names: Vec<String> =
            rel.participants.iter().map(|p| p.name.join("::")).collect();
        // The "self" participant is the first one carrying a self or other
        // block, if any.
        let self_index = rel
            .participants
            .iter()
            .position(|p| p.self_block.is_some() || p.other_block.is_some());
        groups
            .entry(RelationshipKey::new(participant_names, rel.name.clone()))
            .or_default()
            .push(RelationshipDecl {
                relationship: rel.clone(),
                self_index,
            });
    }
    // Merge each group; `collect` short-circuits on the first error.
    groups
        .into_iter()
        .map(|(key, decls)| merge_relationship_declarations(&key, decls))
        .collect()
}
/// Merge multiple declarations of the same relationship
///
/// The first declaration in `decls` supplies the participant list and the
/// baseline self/other blocks; later declarations contribute their "self"
/// participant's blocks and their shared fields, which must not conflict.
fn merge_relationship_declarations(
    key: &RelationshipKey,
    decls: Vec<RelationshipDecl>,
) -> Result<ResolvedRelationship> {
    // Grouping is done upstream; an empty group would be a caller bug.
    if decls.is_empty() {
        return Err(ResolveError::ValidationError {
            message: "Empty relationship group".to_string(),
            help: Some("This is an internal error - relationship groups should never be empty. Please report this as a bug.".to_string()),
        });
    }
    // Start with the first declaration
    let base = &decls[0].relationship;
    let mut participant_fields: Vec<ParticipantFields> = base
        .participants
        .iter()
        .map(|p| ParticipantFields {
            participant_name: p.name.clone(),
            role: p.role.clone(),
            self_fields: p.self_block.clone().unwrap_or_default(),
            other_fields: p.other_block.clone().unwrap_or_default(),
        })
        .collect();
    // Merge additional declarations
    for decl in decls.iter().skip(1) {
        // If this declaration specifies a different participant as "self",
        // merge their self/other blocks appropriately
        if let Some(self_idx) = decl.self_index {
            let participant_name = &decl.relationship.participants[self_idx].name;
            // Find this participant in our merged list
            // NOTE(review): declarations whose "self" participant is not in
            // the base participant list are silently skipped here — confirm
            // that is intended rather than an error.
            if let Some(idx) = participant_fields
                .iter()
                .position(|pf| &pf.participant_name == participant_name)
            {
                // Merge self blocks
                let self_block = decl.relationship.participants[self_idx]
                    .self_block
                    .clone()
                    .unwrap_or_default();
                merge_fields(&mut participant_fields[idx].self_fields, self_block)?;
                // Merge other blocks
                let other_block = decl.relationship.participants[self_idx]
                    .other_block
                    .clone()
                    .unwrap_or_default();
                merge_fields(&mut participant_fields[idx].other_fields, other_block)?;
            }
        }
    }
    // Merge shared fields (fields outside self/other blocks)
    let mut merged_fields = base.fields.clone();
    for decl in decls.iter().skip(1) {
        merge_fields(&mut merged_fields, decl.relationship.fields.clone())?;
    }
    Ok(ResolvedRelationship {
        name: key.name.clone(),
        participants: base.participants.clone(),
        fields: merged_fields,
        participant_fields,
    })
}
/// Merge field lists, detecting conflicts
///
/// Appends each field of `source` to `target` unless a field of the same
/// name is already present. A same-name field with an equal value is a
/// harmless duplicate; a differing value is a conflict error.
fn merge_fields(target: &mut Vec<Field>, source: Vec<Field>) -> Result<()> {
    for incoming in source {
        match target.iter().find(|f| f.name == incoming.name) {
            // Same name, different value: the declarations disagree.
            Some(existing) if existing.value != incoming.value => {
                return Err(ResolveError::ValidationError {
                    message: format!(
                        "Conflicting values for field '{}' in relationship",
                        incoming.name
                    ),
                    help: Some(format!(
                        "The field '{}' has different values in different declarations of the same relationship. Make sure all declarations of this relationship use the same value for shared fields.",
                        incoming.name
                    )),
                });
            },
            // Identical duplicate — keep the copy already in `target`.
            Some(_) => {},
            // Unseen name — append it.
            None => target.push(incoming),
        }
    }
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::syntax::ast::{
        Span,
        Value,
    };
    // Build a block-less participant with an optional role.
    fn make_participant(name: &str, role: Option<&str>) -> Participant {
        Participant {
            name: vec![name.to_string()],
            role: role.map(|s| s.to_string()),
            self_block: None,
            other_block: None,
            span: Span::new(0, 10),
        }
    }
    // Build an integer-valued field with a placeholder span.
    fn make_field(name: &str, value: i64) -> Field {
        Field {
            name: name.to_string(),
            value: Value::Int(value),
            span: Span::new(0, 10),
        }
    }
    // Keys built from the same participants in either order compare equal.
    #[test]
    fn test_relationship_key_order_independent() {
        let key1 = RelationshipKey::new(
            vec!["Martha".to_string(), "David".to_string()],
            "Marriage".to_string(),
        );
        let key2 = RelationshipKey::new(
            vec!["David".to_string(), "Martha".to_string()],
            "Marriage".to_string(),
        );
        assert_eq!(key1, key2);
    }
    // A lone declaration resolves as-is: one relationship, same name and
    // participant count.
    #[test]
    fn test_single_relationship_declaration() {
        let file = File {
            declarations: vec![Declaration::Relationship(Relationship {
                name: "Friendship".to_string(),
                participants: vec![
                    make_participant("Alice", None),
                    make_participant("Bob", None),
                ],
                fields: vec![make_field("bond", 80)],
                span: Span::new(0, 10),
            })],
        };
        let resolved = resolve_relationships(&file).unwrap();
        assert_eq!(resolved.len(), 1);
        assert_eq!(resolved[0].name, "Friendship");
        assert_eq!(resolved[0].participants.len(), 2);
    }
    // The same marriage declared from Martha's side and from David's side
    // merges into a single resolved relationship.
    #[test]
    fn test_bidirectional_relationship_merge() {
        let mut martha_participant = make_participant("Martha", Some("spouse"));
        martha_participant.self_block = Some(vec![make_field("bond", 90)]);
        martha_participant.other_block = Some(vec![make_field("trust", 85)]);
        let mut david_participant = make_participant("David", Some("spouse"));
        david_participant.self_block = Some(vec![make_field("bond", 90)]);
        david_participant.other_block = Some(vec![make_field("trust", 85)]);
        let file = File {
            declarations: vec![
                Declaration::Relationship(Relationship {
                    name: "Marriage".to_string(),
                    participants: vec![
                        martha_participant.clone(),
                        make_participant("David", Some("spouse")),
                    ],
                    fields: vec![],
                    span: Span::new(0, 10),
                }),
                Declaration::Relationship(Relationship {
                    name: "Marriage".to_string(),
                    participants: vec![
                        david_participant.clone(),
                        make_participant("Martha", Some("spouse")),
                    ],
                    fields: vec![],
                    span: Span::new(20, 30),
                }),
            ],
        };
        let resolved = resolve_relationships(&file).unwrap();
        assert_eq!(resolved.len(), 1);
        assert_eq!(resolved[0].name, "Marriage");
    }
    // Two declarations of the same relationship giving the same self-block
    // field different values must be rejected.
    #[test]
    fn test_conflicting_field_values() {
        let mut p1 = make_participant("Alice", None);
        p1.self_block = Some(vec![make_field("bond", 80)]);
        let mut p2 = make_participant("Alice", None);
        p2.self_block = Some(vec![make_field("bond", 90)]); // Different value
        let file = File {
            declarations: vec![
                Declaration::Relationship(Relationship {
                    name: "Test".to_string(),
                    participants: vec![p1, make_participant("Bob", None)],
                    fields: vec![],
                    span: Span::new(0, 10),
                }),
                Declaration::Relationship(Relationship {
                    name: "Test".to_string(),
                    participants: vec![p2, make_participant("Bob", None)],
                    fields: vec![],
                    span: Span::new(20, 30),
                }),
            ],
        };
        let result = resolve_relationships(&file);
        assert!(result.is_err());
    }
}

View File

@@ -0,0 +1,486 @@
//! Property tests for bidirectional relationship resolution
use proptest::prelude::*;
use crate::{
resolve::links::resolve_relationships,
syntax::ast::*,
};
// ===== Generators =====
/// Strategy producing identifiers that match the lexer's identifier shape
/// and are not DSL keywords.
///
/// The rejection list mirrors the `RESERVED_KEYWORDS` table used by the
/// sibling property-test module; `"strict"`, `"include"` and `"from"` were
/// previously missing here, which let the generator emit keywords as
/// identifiers.
fn valid_ident() -> impl Strategy<Value = String> {
    "[a-zA-Z_][a-zA-Z0-9_]{0,15}".prop_filter("not a keyword", |s| {
        !matches!(
            s.as_str(),
            "use" |
            "character" |
            "template" |
            "life_arc" |
            "schedule" |
            "behavior" |
            "institution" |
            "relationship" |
            "location" |
            "species" |
            "enum" |
            "state" |
            "on" |
            "as" |
            "self" |
            "other" |
            "remove" |
            "append" |
            "strict" |
            "include" |
            "from" |
            "forall" |
            "exists" |
            "in" |
            "where" |
            "and" |
            "or" |
            "not" |
            "is" |
            "true" |
            "false"
        )
    })
}
/// Strategy producing an integer-valued `Field` (value in 0..100) with a
/// non-reserved name and a placeholder span.
fn valid_field() -> impl Strategy<Value = Field> {
    (valid_ident(), 0i64..100).prop_map(|(field_name, n)| Field {
        name: field_name,
        value: Value::Int(n),
        span: Span::new(0, 10),
    })
}
/// Strategy producing up to 4 fields with pairwise-distinct names.
/// Later duplicates are dropped in place; the first occurrence of each
/// name wins, exactly as the original accumulate-into-new-vec version did.
fn valid_field_list() -> impl Strategy<Value = Vec<Field>> {
    prop::collection::vec(valid_field(), 0..5).prop_map(|mut fields| {
        let mut seen = std::collections::HashSet::new();
        fields.retain(|field| seen.insert(field.name.clone()));
        fields
    })
}
/// Strategy producing a participant named `name` with an optional role and
/// no self/other blocks.
fn valid_participant(name: String) -> impl Strategy<Value = Participant> {
    prop::option::of(valid_ident()).prop_map(move |maybe_role| Participant {
        name: vec![name.clone()],
        role: maybe_role,
        self_block: None,
        other_block: None,
        span: Span::new(0, 10),
    })
}
/// Strategy producing a participant named `name` where the role and both
/// the self and other blocks are each independently optional.
#[allow(dead_code)]
fn valid_participant_with_blocks(name: String) -> impl Strategy<Value = Participant> {
    let role = prop::option::of(valid_ident());
    let self_fields = prop::option::of(valid_field_list());
    let other_fields = prop::option::of(valid_field_list());
    (role, self_fields, other_fields).prop_map(move |(r, sb, ob)| Participant {
        name: vec![name.clone()],
        role: r,
        self_block: sb,
        other_block: ob,
        span: Span::new(0, 10),
    })
}
// Strategy producing a two-participant relationship with unique shared
// fields. `prop_flat_map` is needed because the participant strategies are
// parameterized by the generated names.
fn valid_relationship() -> impl Strategy<Value = Relationship> {
    (
        valid_ident(),
        valid_ident(),
        valid_ident(),
        valid_field_list(),
    )
        .prop_flat_map(|(rel_name, person1, person2, fields)| {
            (
                Just(rel_name),
                valid_participant(person1.clone()),
                valid_participant(person2.clone()),
                Just(fields),
            )
        })
        .prop_map(|(name, p1, p2, fields)| Relationship {
            name,
            participants: vec![p1, p2],
            fields,
            span: Span::new(0, 10),
        })
}
// Strategy producing the same relationship declared twice, once from each
// participant's perspective. Both declarations share the same shared
// fields, and both "self" blocks carry identical field lists so merging
// never conflicts.
fn valid_bidirectional_relationship() -> impl Strategy<Value = (Relationship, Relationship)> {
    (
        valid_ident(),
        valid_ident(),
        valid_ident(),
        valid_field_list(),
        valid_field_list(),
    )
        .prop_flat_map(|(rel_name, person1, person2, shared_fields, self_fields)| {
            // Duplicate the self-field list so both perspectives agree.
            let self_fields_clone = self_fields.clone();
            (
                Just(rel_name.clone()),
                Just(person1.clone()),
                Just(person2.clone()),
                Just(shared_fields.clone()),
                Just(self_fields),
                Just(self_fields_clone),
            )
        })
        .prop_map(|(name, p1_name, p2_name, shared, p1_self, p2_self)| {
            // First declaration from p1's perspective
            let p1 = Participant {
                name: vec![p1_name.clone()],
                role: None,
                self_block: Some(p1_self),
                other_block: None,
                span: Span::new(0, 10),
            };
            let p2_in_p1_rel = Participant {
                name: vec![p2_name.clone()],
                role: None,
                self_block: None,
                other_block: None,
                span: Span::new(0, 10),
            };
            let rel1 = Relationship {
                name: name.clone(),
                participants: vec![p1, p2_in_p1_rel],
                fields: shared.clone(),
                span: Span::new(0, 10),
            };
            // Second declaration from p2's perspective
            let p2 = Participant {
                name: vec![p2_name],
                role: None,
                self_block: Some(p2_self),
                other_block: None,
                span: Span::new(20, 30),
            };
            let p1_in_p2_rel = Participant {
                name: vec![p1_name],
                role: None,
                self_block: None,
                other_block: None,
                span: Span::new(20, 30),
            };
            let rel2 = Relationship {
                name,
                participants: vec![p2, p1_in_p2_rel],
                fields: shared,
                span: Span::new(20, 30),
            };
            (rel1, rel2)
        })
}
// ===== Property Tests =====
proptest! {
    // A single well-formed declaration always resolves into exactly one
    // relationship.
    #[test]
    fn test_single_relationship_always_resolves(rel in valid_relationship()) {
        let file = File {
            declarations: vec![Declaration::Relationship(rel)],
        };
        let result = resolve_relationships(&file);
        assert!(result.is_ok(), "Single relationship should always resolve");
        let resolved = result.unwrap();
        assert_eq!(resolved.len(), 1);
    }
    // Resolution preserves the participant count.
    #[test]
    fn test_relationship_participant_count_preserved(rel in valid_relationship()) {
        let file = File {
            declarations: vec![Declaration::Relationship(rel.clone())],
        };
        let resolved = resolve_relationships(&file).unwrap();
        assert_eq!(resolved[0].participants.len(), rel.participants.len());
    }
    // Resolution preserves the shared-field count.
    #[test]
    fn test_relationship_fields_preserved(rel in valid_relationship()) {
        let file = File {
            declarations: vec![Declaration::Relationship(rel.clone())],
        };
        let resolved = resolve_relationships(&file).unwrap();
        assert_eq!(resolved[0].fields.len(), rel.fields.len());
    }
    // Two perspectives of the same relationship (generated to agree on all
    // field values) collapse into a single resolved relationship.
    #[test]
    fn test_bidirectional_relationships_merge(
        (rel1, rel2) in valid_bidirectional_relationship()
    ) {
        let file = File {
            declarations: vec![
                Declaration::Relationship(rel1),
                Declaration::Relationship(rel2),
            ],
        };
        let result = resolve_relationships(&file);
        assert!(result.is_ok(), "Bidirectional relationships should merge successfully");
        let resolved = result.unwrap();
        // Should merge into single relationship
        assert_eq!(resolved.len(), 1);
    }
    // Listing participants in the opposite order still produces the same
    // relationship key, so the two declarations merge into one.
    #[test]
    fn test_participant_order_doesnt_matter(
        name in valid_ident(),
        p1 in valid_ident(),
        p2 in valid_ident(),
        fields in valid_field_list()
    ) {
        // Create two identical relationships with participants in different order
        let rel1 = Relationship {
            name: name.clone(),
            participants: vec![
                Participant {
                    name: vec![p1.clone()],
                    role: None,
                    self_block: None,
                    other_block: None,
                    span: Span::new(0, 10),
                },
                Participant {
                    name: vec![p2.clone()],
                    role: None,
                    self_block: None,
                    other_block: None,
                    span: Span::new(0, 10),
                },
            ],
            fields: fields.clone(),
            span: Span::new(0, 10),
        };
        let rel2 = Relationship {
            name: name.clone(),
            participants: vec![
                Participant {
                    name: vec![p2.clone()],
                    role: None,
                    self_block: None,
                    other_block: None,
                    span: Span::new(20, 30),
                },
                Participant {
                    name: vec![p1.clone()],
                    role: None,
                    self_block: None,
                    other_block: None,
                    span: Span::new(20, 30),
                },
            ],
            fields,
            span: Span::new(20, 30),
        };
        let file = File {
            declarations: vec![
                Declaration::Relationship(rel1),
                Declaration::Relationship(rel2),
            ],
        };
        let result = resolve_relationships(&file);
        assert!(result.is_ok());
        let resolved = result.unwrap();
        // Should recognize as same relationship despite order
        assert_eq!(resolved.len(), 1);
    }
    // Same participants under two different relationship names remain two
    // distinct resolved relationships.
    #[test]
    fn test_different_relationships_stay_separate(
        name1 in valid_ident(),
        name2 in valid_ident(),
        p1 in valid_ident(),
        p2 in valid_ident()
    ) {
        // Skip if names are the same
        if name1 == name2 {
            return Ok(());
        }
        let rel1 = Relationship {
            name: name1,
            participants: vec![
                Participant {
                    name: vec![p1.clone()],
                    role: None,
                    self_block: None,
                    other_block: None,
                    span: Span::new(0, 10),
                },
                Participant {
                    name: vec![p2.clone()],
                    role: None,
                    self_block: None,
                    other_block: None,
                    span: Span::new(0, 10),
                },
            ],
            fields: vec![],
            span: Span::new(0, 10),
        };
        let rel2 = Relationship {
            name: name2,
            participants: vec![
                Participant {
                    name: vec![p1],
                    role: None,
                    self_block: None,
                    other_block: None,
                    span: Span::new(20, 30),
                },
                Participant {
                    name: vec![p2],
                    role: None,
                    self_block: None,
                    other_block: None,
                    span: Span::new(20, 30),
                },
            ],
            fields: vec![],
            span: Span::new(20, 30),
        };
        let file = File {
            declarations: vec![
                Declaration::Relationship(rel1),
                Declaration::Relationship(rel2),
            ],
        };
        let result = resolve_relationships(&file);
        assert!(result.is_ok());
        let resolved = result.unwrap();
        // Different relationship names should stay separate
        assert_eq!(resolved.len(), 2);
    }
    // The same participant's self block declared twice (possibly with
    // different fields) merges when values don't conflict; conflicts are
    // covered by a dedicated unit test.
    #[test]
    fn test_self_blocks_are_merged(
        name in valid_ident(),
        p1 in valid_ident(),
        p2 in valid_ident(),
        fields1 in valid_field_list(),
        fields2 in valid_field_list()
    ) {
        let participant1 = Participant {
            name: vec![p1.clone()],
            role: None,
            self_block: Some(fields1),
            other_block: None,
            span: Span::new(0, 10),
        };
        let participant1_again = Participant {
            name: vec![p1.clone()],
            role: None,
            self_block: Some(fields2),
            other_block: None,
            span: Span::new(20, 30),
        };
        let rel1 = Relationship {
            name: name.clone(),
            participants: vec![
                participant1,
                Participant {
                    name: vec![p2.clone()],
                    role: None,
                    self_block: None,
                    other_block: None,
                    span: Span::new(0, 10),
                },
            ],
            fields: vec![],
            span: Span::new(0, 10),
        };
        let rel2 = Relationship {
            name: name.clone(),
            participants: vec![
                participant1_again,
                Participant {
                    name: vec![p2],
                    role: None,
                    self_block: None,
                    other_block: None,
                    span: Span::new(20, 30),
                },
            ],
            fields: vec![],
            span: Span::new(20, 30),
        };
        let file = File {
            declarations: vec![
                Declaration::Relationship(rel1),
                Declaration::Relationship(rel2),
            ],
        };
        let result = resolve_relationships(&file);
        // Should succeed unless there are conflicting field values
        // (which is tested separately)
        if result.is_ok() {
            let resolved = result.unwrap();
            assert_eq!(resolved.len(), 1);
        }
    }
    // A file containing only non-relationship declarations resolves to an
    // empty relationship list.
    #[test]
    fn test_empty_file_gives_empty_result(
        decls in prop::collection::vec(
            prop_oneof![
                valid_ident().prop_map(|name| Declaration::Character(Character {
                    name,
                    fields: vec![],
                    template: None,
                    span: Span::new(0, 10),
                })),
                valid_ident().prop_map(|name| Declaration::Template(Template {
                    name,
                    fields: vec![],
                    strict: false,
                    includes: vec![],
                    span: Span::new(0, 10),
                })),
            ],
            0..5
        )
    ) {
        // File with no relationships
        let file = File { declarations: decls };
        let result = resolve_relationships(&file);
        assert!(result.is_ok());
        let resolved = result.unwrap();
        assert_eq!(resolved.len(), 0);
    }
}

762
src/resolve/merge.rs Normal file
View File

@@ -0,0 +1,762 @@
//! Template composition and merge engine
//!
//! Handles two types of template composition:
//! 1. Template includes (vertical composition): `template Person { include
//! Human ... }`
//! 2. Character template inheritance (horizontal composition): `character
//! Martha from Person, Worker { ... }`
//!
//! Also handles legacy @BaseTemplate { ... } syntax for template overrides
//! with:
//! - Set operations (field: value) - replace or add field
//! - Remove operations (remove field) - delete field
//! - Append operations (append field: value) - add new field (error if exists)
use std::collections::HashSet;
use crate::{
resolve::{
names::NameTable,
ResolveError,
Result,
},
syntax::ast::{
Character,
Declaration,
Field,
OverrideOp,
Template,
Value,
},
};
// ===== Template Composition =====
/// Resolve a template by recursively merging all its includes
///
/// Algorithm:
/// 1. Recursively resolve all included templates (depth-first)
/// 2. Merge included template fields (later includes override earlier ones)
/// 3. Add the template's own fields on top
///
/// `visited` tracks the current include chain for cycle detection; the
/// template is removed from it again on success, so diamond-shaped include
/// graphs are allowed while true cycles are rejected.
///
/// Returns the fully merged fields for this template
pub fn resolve_template_includes(
    template: &Template,
    declarations: &[Declaration],
    name_table: &NameTable,
    visited: &mut HashSet<String>,
) -> Result<Vec<Field>> {
    // Detect circular includes
    // `insert` returns false if the name was already present, i.e. this
    // template is already on the current include chain.
    if !visited.insert(template.name.clone()) {
        return Err(ResolveError::CircularDependency {
            cycle: format!(
                "Circular template include detected: {} -> {}",
                // NOTE(review): `visited` is a HashSet, so the names in this
                // message appear in nondeterministic order, not the actual
                // include path — consider an order-preserving structure.
                visited.iter().cloned().collect::<Vec<_>>().join(" -> "),
                template.name
            ),
        });
    }
    let mut merged_fields = Vec::new();
    // Resolve all includes first
    for include_name in &template.includes {
        // Look up the included template
        let entry = name_table
            .lookup(std::slice::from_ref(include_name))
            .ok_or_else(|| ResolveError::NameNotFound {
                name: include_name.clone(),
                suggestion: name_table.find_suggestion(include_name),
            })?;
        // Get the template declaration; including any other kind is an error.
        let included_template = match &declarations[entry.decl_index] {
            | Declaration::Template(t) => t,
            | _ => {
                return Err(ResolveError::ValidationError {
                    message: format!(
                        "Cannot include '{}': it's not a template",
                        include_name
                    ),
                    help: Some(format!(
                        "The 'include' keyword can only reference templates. '{}' is a different type of declaration. Make sure you're including the correct name and that it refers to a template.",
                        include_name
                    )),
                });
            },
        };
        // Recursively resolve the included template
        let included_fields =
            resolve_template_includes(included_template, declarations, name_table, visited)?;
        // Merge included fields (replacing any existing fields with same name)
        merged_fields = merge_field_lists(merged_fields, included_fields);
    }
    // Add this template's own fields on top
    merged_fields = merge_field_lists(merged_fields, template.fields.clone());
    // Remove this template from visited set (allow it to be used in other branches)
    visited.remove(&template.name);
    Ok(merged_fields)
}
/// Merge character templates into character fields
///
/// Algorithm:
/// 1. Resolve each template (which may itself include other templates)
/// 2. Merge templates left to right (later templates override earlier ones)
/// 3. Add character's own fields on top
/// 4. If any template is strict, validate that all its fields are concrete
///
/// Returns the fully merged fields for this character
pub fn merge_character_templates(
    character: &Character,
    declarations: &[Declaration],
    name_table: &NameTable,
) -> Result<Vec<Field>> {
    let mut merged_fields = Vec::new();
    // Names of strict templates encountered, kept for the error message.
    let mut strict_templates = Vec::new();
    // If character has templates, merge them
    if let Some(template_names) = &character.template {
        for template_name in template_names {
            // Look up the template
            let entry = name_table
                .lookup(std::slice::from_ref(template_name))
                .ok_or_else(|| ResolveError::NameNotFound {
                    name: template_name.clone(),
                    suggestion: name_table.find_suggestion(template_name),
                })?;
            // Get the template declaration; inheriting from any other kind
            // of declaration is an error.
            let template = match &declarations[entry.decl_index] {
                | Declaration::Template(t) => t,
                | _ => {
                    return Err(ResolveError::ValidationError {
                        message: format!(
                            "Character '{}' cannot inherit from '{}': it's not a template",
                            character.name, template_name
                        ),
                        help: Some(format!(
                            "The 'from' keyword can only reference templates. '{}' is a different type of declaration. Make sure you're inheriting from the correct name and that it refers to a template.",
                            template_name
                        )),
                    });
                },
            };
            // Track strict templates for validation
            if template.strict {
                strict_templates.push(template_name.clone());
            }
            // Resolve template (which handles includes recursively)
            // Each top-level template starts with a fresh cycle-tracking set.
            let mut visited = HashSet::new();
            let template_fields =
                resolve_template_includes(template, declarations, name_table, &mut visited)?;
            // Merge template fields into accumulated fields
            merged_fields = merge_field_lists(merged_fields, template_fields);
        }
    }
    // Add character's own fields on top
    merged_fields = merge_field_lists(merged_fields, character.fields.clone());
    // Validate strict mode: all strict template fields must have concrete values
    // NOTE(review): validation runs over ALL merged fields, not only those
    // originating from the strict template(s) — confirm that is intended.
    if !strict_templates.is_empty() {
        validate_strict_templates(&character.name, &merged_fields, &strict_templates)?;
    }
    Ok(merged_fields)
}
/// Merge two field lists, with fields from the second list overriding the first.
///
/// Fields that already exist in `base` are replaced in place (both value and
/// span are taken from the overriding field so diagnostics point at the
/// override site); new fields are appended, preserving first-appearance order.
fn merge_field_lists(base: Vec<Field>, override_fields: Vec<Field>) -> Vec<Field> {
    let mut merged = base;
    for field in override_fields {
        // If field exists, replace it; otherwise add it
        if let Some(existing) = merged.iter_mut().find(|f| f.name == field.name) {
            // `field` is owned here, so move its parts instead of cloning them.
            existing.value = field.value;
            existing.span = field.span;
        } else {
            merged.push(field);
        }
    }
    merged
}
/// Validate that strict template requirements are met
///
/// For strict templates, all fields must have concrete values (not ranges)
fn validate_strict_templates(
character_name: &str,
fields: &[Field],
strict_templates: &[String],
) -> Result<()> {
for field in fields {
if let Value::Range(_, _) = &field.value {
return Err(ResolveError::ValidationError {
message: format!(
"Character '{}' inherits from strict template(s) {}, but field '{}' has a range value instead of a concrete value",
character_name,
strict_templates.join(", "),
field.name
),
help: Some(format!(
"Strict templates require all fields to have concrete values. Replace the range in '{}' with a specific value. For example, instead of '18..65', use a specific age like '34'.",
field.name
)),
});
}
}
Ok(())
}
// ===== Legacy Override System =====
/// Apply an override to a base template's fields.
///
/// Performs a structural merge: starting from `base_fields`, every operation
/// in `override_ops` is applied in order and the merged field list is
/// returned. `Set` replaces-or-adds, `Remove` is a no-op when the field is
/// missing, and `Append` fails if the field already exists.
pub fn apply_override(base_fields: Vec<Field>, override_ops: &[OverrideOp]) -> Result<Vec<Field>> {
    let mut fields = base_fields;
    for op in override_ops {
        match op {
            | OverrideOp::Set(new_field) => {
                // Update in place when the name is present, otherwise append.
                match fields.iter_mut().find(|f| f.name == new_field.name) {
                    | Some(slot) => {
                        slot.value = new_field.value.clone();
                        slot.span = new_field.span.clone();
                    },
                    | None => fields.push(new_field.clone()),
                }
            },
            | OverrideOp::Remove(name) => {
                // Drop any field with this name; removing a missing field is
                // deliberately a no-op.
                fields.retain(|f| f.name != *name);
            },
            | OverrideOp::Append(new_field) => {
                // Appending is only legal for names that are not yet present.
                let already_present = fields.iter().any(|f| f.name == new_field.name);
                if already_present {
                    return Err(ResolveError::ValidationError {
                        message: format!(
                            "Cannot append field '{}': field already exists",
                            new_field.name
                        ),
                        help: Some(format!(
                            "The 'append' operation is used to add new fields that don't exist in the base template. The field '{}' already exists. Use 'set' instead to update an existing field, or use a different field name.",
                            new_field.name
                        )),
                    });
                }
                fields.push(new_field.clone());
            },
        }
    }
    Ok(fields)
}
/// Recursively resolve overrides in a value.
///
/// `Override` values are looked up in the name table (and then rejected,
/// because full override resolution is not implemented yet); lists and
/// objects are walked recursively; every other value is returned unchanged.
pub fn resolve_value_overrides(value: &Value, name_table: &NameTable) -> Result<Value> {
    match value {
        | Value::Override(override_spec) => {
            // The base template must at least exist, even though we cannot
            // apply the override operations yet.
            let _base_entry = name_table.lookup(&override_spec.base).ok_or_else(|| {
                ResolveError::NameNotFound {
                    name: override_spec.base.join("::"),
                    suggestion: name_table
                        .find_suggestion(override_spec.base.last().unwrap_or(&String::new())),
                }
            })?;
            // For now, we'll return an error since we need the actual template fields
            // In a full implementation, we'd extract the fields from the base declaration
            Err(ResolveError::ValidationError {
                message: format!(
                    "Override resolution not yet fully implemented for base '{}'",
                    override_spec.base.join("::")
                ),
                help: Some("Template overrides are not yet supported. This feature is planned for a future release. For now, define characters directly without using template inheritance.".to_string()),
            })
        },
        | Value::List(items) => {
            // Walk list elements one at a time, short-circuiting on error.
            let mut resolved = Vec::with_capacity(items.len());
            for item in items {
                resolved.push(resolve_value_overrides(item, name_table)?);
            }
            Ok(Value::List(resolved))
        },
        | Value::Object(obj_fields) => {
            // Rebuild each object field around its resolved value.
            let mut resolved_fields = Vec::with_capacity(obj_fields.len());
            for field in obj_fields {
                resolved_fields.push(Field {
                    name: field.name.clone(),
                    value: resolve_value_overrides(&field.value, name_table)?,
                    span: field.span.clone(),
                });
            }
            Ok(Value::Object(resolved_fields))
        },
        // Other value types don't contain overrides
        | _ => Ok(value.clone()),
    }
}
/// Check if applying the same override twice gives the same result
/// (idempotence)
///
/// Returns `false` when either application errors; otherwise compares the
/// result of one application with the result of applying the ops again.
pub fn is_idempotent(base: &[Field], ops: &[OverrideOp]) -> bool {
    // Match on each Result once instead of the is_err()/unwrap() pattern.
    match apply_override(base.to_vec(), ops) {
        | Ok(once) => match apply_override(once.clone(), ops) {
            // Idempotent iff a second application changes nothing.
            | Ok(twice) => once == twice,
            | Err(_) => false,
        },
        | Err(_) => false,
    }
}
#[cfg(test)]
/// Unit tests for the legacy override engine (`apply_override`,
/// `is_idempotent`, `merge_field_lists`) and for template composition
/// (`resolve_template_includes`, `merge_character_templates`, strict mode).
mod tests {
use super::*;
use crate::syntax::ast::Span;
// Helper: build an integer-valued field with a dummy span.
fn make_field(name: &str, value: i64) -> Field {
Field {
name: name.to_string(),
value: Value::Int(value),
span: Span::new(0, 10),
}
}
#[test]
fn test_set_replaces_existing_field() {
let base = vec![make_field("age", 25), make_field("health", 100)];
let ops = vec![OverrideOp::Set(make_field("age", 30))];
let result = apply_override(base, &ops).unwrap();
assert_eq!(result.len(), 2);
let age_field = result.iter().find(|f| f.name == "age").unwrap();
assert_eq!(age_field.value, Value::Int(30));
}
#[test]
fn test_set_adds_new_field() {
let base = vec![make_field("age", 25)];
let ops = vec![OverrideOp::Set(make_field("health", 100))];
let result = apply_override(base, &ops).unwrap();
assert_eq!(result.len(), 2);
assert!(result.iter().any(|f| f.name == "health"));
}
#[test]
fn test_remove_deletes_field() {
let base = vec![
make_field("age", 25),
make_field("health", 100),
make_field("energy", 50),
];
let ops = vec![OverrideOp::Remove("health".to_string())];
let result = apply_override(base, &ops).unwrap();
assert_eq!(result.len(), 2);
assert!(!result.iter().any(|f| f.name == "health"));
assert!(result.iter().any(|f| f.name == "age"));
assert!(result.iter().any(|f| f.name == "energy"));
}
#[test]
fn test_remove_nonexistent_field_is_noop() {
let base = vec![make_field("age", 25)];
let ops = vec![OverrideOp::Remove("nonexistent".to_string())];
let result = apply_override(base, &ops).unwrap();
assert_eq!(result.len(), 1);
assert_eq!(result[0].name, "age");
}
#[test]
fn test_append_adds_new_field() {
let base = vec![make_field("age", 25)];
let ops = vec![OverrideOp::Append(make_field("health", 100))];
let result = apply_override(base, &ops).unwrap();
assert_eq!(result.len(), 2);
assert!(result.iter().any(|f| f.name == "health"));
}
#[test]
fn test_append_existing_field_errors() {
let base = vec![make_field("age", 25)];
let ops = vec![OverrideOp::Append(make_field("age", 30))];
let result = apply_override(base, &ops);
assert!(result.is_err());
}
#[test]
fn test_multiple_operations() {
let base = vec![
make_field("age", 25),
make_field("health", 100),
make_field("energy", 50),
];
let ops = vec![
OverrideOp::Set(make_field("age", 30)),
OverrideOp::Remove("energy".to_string()),
OverrideOp::Append(make_field("strength", 75)),
];
let result = apply_override(base, &ops).unwrap();
assert_eq!(result.len(), 3);
let age = result.iter().find(|f| f.name == "age").unwrap();
assert_eq!(age.value, Value::Int(30));
assert!(!result.iter().any(|f| f.name == "energy"));
assert!(result.iter().any(|f| f.name == "strength"));
}
#[test]
fn test_set_is_idempotent() {
let base = vec![make_field("age", 25)];
let ops = vec![OverrideOp::Set(make_field("age", 30))];
assert!(is_idempotent(&base, &ops));
}
#[test]
fn test_remove_is_idempotent() {
let base = vec![make_field("age", 25), make_field("health", 100)];
let ops = vec![OverrideOp::Remove("health".to_string())];
assert!(is_idempotent(&base, &ops));
}
#[test]
fn test_append_is_not_idempotent() {
let base = vec![make_field("age", 25)];
let ops = vec![OverrideOp::Append(make_field("health", 100))];
// Append is NOT idempotent because second application would try to
// append to a list that already has the field
assert!(!is_idempotent(&base, &ops));
}
// ===== Template Composition Tests =====
use crate::syntax::ast::File;
// Helper: wrap a declaration list in a File AST node.
fn make_file(declarations: Vec<Declaration>) -> File {
File { declarations }
}
// Helper: build a template with the given fields, include names, and
// strictness flag (dummy span).
fn make_template(
name: &str,
fields: Vec<Field>,
includes: Vec<&str>,
strict: bool,
) -> Template {
Template {
name: name.to_string(),
fields,
includes: includes.iter().map(|s| s.to_string()).collect(),
strict,
span: Span::new(0, 10),
}
}
// Helper: build a character; an empty `templates` list maps to `None`.
fn make_character(name: &str, fields: Vec<Field>, templates: Vec<&str>) -> Character {
Character {
name: name.to_string(),
fields,
template: if templates.is_empty() {
None
} else {
Some(templates.iter().map(|s| s.to_string()).collect())
},
span: Span::new(0, 10),
}
}
#[test]
fn test_resolve_template_with_no_includes() {
let template = make_template("Person", vec![make_field("age", 25)], vec![], false);
let declarations = vec![Declaration::Template(template.clone())];
let name_table = NameTable::from_file(&make_file(declarations.clone())).unwrap();
let mut visited = HashSet::new();
let result =
resolve_template_includes(&template, &declarations, &name_table, &mut visited).unwrap();
assert_eq!(result.len(), 1);
assert_eq!(result[0].name, "age");
assert_eq!(result[0].value, Value::Int(25));
}
#[test]
fn test_resolve_template_with_single_include() {
let base = make_template("Human", vec![make_field("age", 0)], vec![], false);
let derived = make_template("Person", vec![make_field("name", 0)], vec!["Human"], false);
let declarations = vec![
Declaration::Template(base),
Declaration::Template(derived.clone()),
];
let name_table = NameTable::from_file(&make_file(declarations.clone())).unwrap();
let mut visited = HashSet::new();
let result =
resolve_template_includes(&derived, &declarations, &name_table, &mut visited).unwrap();
assert_eq!(result.len(), 2);
assert!(result.iter().any(|f| f.name == "age"));
assert!(result.iter().any(|f| f.name == "name"));
}
#[test]
fn test_resolve_template_with_chained_includes() {
let base = make_template("Being", vec![make_field("alive", 1)], vec![], false);
let middle = make_template("Human", vec![make_field("age", 0)], vec!["Being"], false);
let top = make_template("Person", vec![make_field("name", 0)], vec!["Human"], false);
let declarations = vec![
Declaration::Template(base),
Declaration::Template(middle),
Declaration::Template(top.clone()),
];
let name_table = NameTable::from_file(&make_file(declarations.clone())).unwrap();
let mut visited = HashSet::new();
let result =
resolve_template_includes(&top, &declarations, &name_table, &mut visited).unwrap();
assert_eq!(result.len(), 3);
assert!(result.iter().any(|f| f.name == "alive"));
assert!(result.iter().any(|f| f.name == "age"));
assert!(result.iter().any(|f| f.name == "name"));
}
#[test]
fn test_resolve_template_field_override() {
let base = make_template("Human", vec![make_field("age", 0)], vec![], false);
let derived = make_template(
"Person",
vec![make_field("age", 25)], // Override with concrete value
vec!["Human"],
false,
);
let declarations = vec![
Declaration::Template(base),
Declaration::Template(derived.clone()),
];
let name_table = NameTable::from_file(&make_file(declarations.clone())).unwrap();
let mut visited = HashSet::new();
let result =
resolve_template_includes(&derived, &declarations, &name_table, &mut visited).unwrap();
assert_eq!(result.len(), 1);
assert_eq!(result[0].name, "age");
assert_eq!(result[0].value, Value::Int(25)); // Should be overridden
// value
}
#[test]
fn test_merge_character_templates_single() {
let template = make_template("Person", vec![make_field("age", 0)], vec![], false);
let character = make_character("Martha", vec![make_field("age", 34)], vec!["Person"]);
let declarations = vec![
Declaration::Template(template),
Declaration::Character(character.clone()),
];
let name_table = NameTable::from_file(&make_file(declarations.clone())).unwrap();
let result = merge_character_templates(&character, &declarations, &name_table).unwrap();
assert_eq!(result.len(), 1);
assert_eq!(result[0].name, "age");
assert_eq!(result[0].value, Value::Int(34)); // Character's value
// overrides template
}
#[test]
fn test_merge_character_templates_multiple() {
let physical = make_template("Physical", vec![make_field("height", 0)], vec![], false);
let mental = make_template("Mental", vec![make_field("iq", 0)], vec![], false);
let character = make_character(
"Martha",
vec![make_field("height", 165), make_field("iq", 120)],
vec!["Physical", "Mental"],
);
let declarations = vec![
Declaration::Template(physical),
Declaration::Template(mental),
Declaration::Character(character.clone()),
];
let name_table = NameTable::from_file(&make_file(declarations.clone())).unwrap();
let result = merge_character_templates(&character, &declarations, &name_table).unwrap();
assert_eq!(result.len(), 2);
assert!(result
.iter()
.any(|f| f.name == "height" && f.value == Value::Int(165)));
assert!(result
.iter()
.any(|f| f.name == "iq" && f.value == Value::Int(120)));
}
#[test]
fn test_merge_character_templates_with_includes() {
let base = make_template("Human", vec![make_field("age", 0)], vec![], false);
let derived = make_template("Person", vec![make_field("name", 0)], vec!["Human"], false);
let character = make_character(
"Martha",
vec![make_field("age", 34), make_field("name", 1)],
vec!["Person"],
);
let declarations = vec![
Declaration::Template(base),
Declaration::Template(derived),
Declaration::Character(character.clone()),
];
let name_table = NameTable::from_file(&make_file(declarations.clone())).unwrap();
let result = merge_character_templates(&character, &declarations, &name_table).unwrap();
assert_eq!(result.len(), 2);
assert!(result
.iter()
.any(|f| f.name == "age" && f.value == Value::Int(34)));
assert!(result
.iter()
.any(|f| f.name == "name" && f.value == Value::Int(1)));
}
#[test]
fn test_strict_template_validation_passes() {
let template = make_template("Person", vec![make_field("age", 0)], vec![], true);
let character = make_character("Martha", vec![make_field("age", 34)], vec!["Person"]);
let declarations = vec![
Declaration::Template(template),
Declaration::Character(character.clone()),
];
let name_table = NameTable::from_file(&make_file(declarations.clone())).unwrap();
let result = merge_character_templates(&character, &declarations, &name_table);
assert!(result.is_ok());
}
#[test]
fn test_strict_template_validation_fails_with_range() {
let template = make_template(
"Person",
vec![Field {
name: "age".to_string(),
value: Value::Range(Box::new(Value::Int(18)), Box::new(Value::Int(65))),
span: Span::new(0, 10),
}],
vec![],
true,
);
let character = make_character("Martha", vec![], vec!["Person"]);
let declarations = vec![
Declaration::Template(template),
Declaration::Character(character.clone()),
];
let name_table = NameTable::from_file(&make_file(declarations.clone())).unwrap();
let result = merge_character_templates(&character, &declarations, &name_table);
assert!(result.is_err());
if let Err(ResolveError::ValidationError { message, .. }) = result {
assert!(message.contains("strict template"));
assert!(message.contains("range value"));
}
}
#[test]
fn test_circular_include_detection() {
// A includes B and B includes A: resolution must detect the cycle.
let a = make_template("A", vec![], vec!["B"], false);
let b = make_template("B", vec![], vec!["A"], false);
let declarations = vec![Declaration::Template(a.clone()), Declaration::Template(b)];
let name_table = NameTable::from_file(&make_file(declarations.clone())).unwrap();
let mut visited = HashSet::new();
let result = resolve_template_includes(&a, &declarations, &name_table, &mut visited);
assert!(result.is_err());
if let Err(ResolveError::CircularDependency { .. }) = result {
// Expected
} else {
panic!("Expected CircularDependency error");
}
}
#[test]
fn test_merge_field_lists_override() {
let base = vec![make_field("age", 25), make_field("health", 100)];
let overrides = vec![make_field("age", 30)];
let result = merge_field_lists(base, overrides);
assert_eq!(result.len(), 2);
let age = result.iter().find(|f| f.name == "age").unwrap();
assert_eq!(age.value, Value::Int(30));
}
#[test]
fn test_merge_field_lists_add_new() {
let base = vec![make_field("age", 25)];
let overrides = vec![make_field("health", 100)];
let result = merge_field_lists(base, overrides);
assert_eq!(result.len(), 2);
assert!(result.iter().any(|f| f.name == "age"));
assert!(result.iter().any(|f| f.name == "health"));
}
}

View File

@@ -0,0 +1,338 @@
//! Property tests for override merge engine
use proptest::prelude::*;
use crate::{
resolve::merge::{
apply_override,
is_idempotent,
},
syntax::ast::*,
};
// ===== Generators =====
/// Strategy generating identifier-shaped strings: a letter or underscore
/// followed by up to 15 alphanumeric/underscore characters.
fn valid_ident() -> impl Strategy<Value = String> {
"[a-zA-Z_][a-zA-Z0-9_]{0,15}"
}
/// Strategy generating a single `Field` with a random identifier name, an
/// integer value in `0..1000`, and a fixed dummy span.
fn valid_field() -> impl Strategy<Value = Field> {
(valid_ident(), 0i64..1000).prop_map(|(name, value)| Field {
name,
value: Value::Int(value),
span: Span::new(0, 10),
})
}
/// Strategy generating a list of up to 9 fields with pairwise-distinct names.
///
/// Duplicate names are dropped after generation, keeping only the first field
/// generated for each name, so merge semantics downstream stay unambiguous.
fn valid_field_list() -> impl Strategy<Value = Vec<Field>> {
    prop::collection::vec(valid_field(), 0..10)
        // Ensure unique field names: `insert` returns false on repeats, so the
        // filter retains the first occurrence of each name in order.
        .prop_map(|fields| {
            let mut seen_names = std::collections::HashSet::new();
            fields
                .into_iter()
                .filter(|field| seen_names.insert(field.name.clone()))
                .collect::<Vec<_>>()
        })
}
/// Strategy generating a `Set` override operation wrapping a random field.
fn valid_set_op() -> impl Strategy<Value = OverrideOp> {
valid_field().prop_map(OverrideOp::Set)
}
/// Strategy generating a `Remove` override operation with a random name.
fn valid_remove_op() -> impl Strategy<Value = OverrideOp> {
valid_ident().prop_map(OverrideOp::Remove)
}
/// Strategy generating an `Append` override operation wrapping a random field.
fn valid_append_op() -> impl Strategy<Value = OverrideOp> {
valid_field().prop_map(OverrideOp::Append)
}
/// Strategy generating 0 to 9 override operations, mixing `Set`, `Remove`,
/// and `Append` with equal weight.
fn valid_override_ops() -> impl Strategy<Value = Vec<OverrideOp>> {
prop::collection::vec(
prop_oneof![valid_set_op(), valid_remove_op(), valid_append_op(),],
0..10,
)
}
// ===== Property Tests =====
proptest! {
// Each #[test] below is executed by proptest across many generated inputs;
// generators come from the strategy functions defined above.
#[test]
fn test_empty_override_returns_base(base in valid_field_list()) {
let ops: Vec<OverrideOp> = vec![];
let result = apply_override(base.clone(), &ops).unwrap();
assert_eq!(result, base);
}
#[test]
fn test_override_never_panics(
base in valid_field_list(),
ops in valid_override_ops()
) {
// Should never panic, might return error
let _ = apply_override(base, &ops);
}
#[test]
fn test_set_always_succeeds(
base in valid_field_list(),
field in valid_field()
) {
let ops = vec![OverrideOp::Set(field)];
let result = apply_override(base, &ops);
assert!(result.is_ok(), "Set operations should always succeed");
}
#[test]
fn test_remove_always_succeeds(
base in valid_field_list(),
name in valid_ident()
) {
let ops = vec![OverrideOp::Remove(name)];
let result = apply_override(base, &ops);
assert!(result.is_ok(), "Remove operations should always succeed");
}
#[test]
fn test_set_adds_or_replaces(
base in valid_field_list(),
field in valid_field()
) {
let ops = vec![OverrideOp::Set(field.clone())];
let result = apply_override(base.clone(), &ops).unwrap();
// Result should contain the field
assert!(result.iter().any(|f| f.name == field.name));
// If field was in base, length should be same
// If field was not in base, length should increase by 1
let was_in_base = base.iter().any(|f| f.name == field.name);
if was_in_base {
assert_eq!(result.len(), base.len());
} else {
assert_eq!(result.len(), base.len() + 1);
}
}
#[test]
fn test_remove_decreases_or_maintains_length(
base in valid_field_list(),
name in valid_ident()
) {
let ops = vec![OverrideOp::Remove(name.clone())];
let result = apply_override(base.clone(), &ops).unwrap();
// Result should not contain the field
assert!(!result.iter().any(|f| f.name == name));
// Length should be <= base length
assert!(result.len() <= base.len());
}
#[test]
fn test_append_to_empty_succeeds(field in valid_field()) {
let base = vec![];
let ops = vec![OverrideOp::Append(field.clone())];
let result = apply_override(base, &ops).unwrap();
assert_eq!(result.len(), 1);
assert_eq!(result[0].name, field.name);
}
#[test]
fn test_append_duplicate_fails(
base in valid_field_list().prop_filter("non-empty", |f| !f.is_empty())
) {
// Try to append a field that already exists
let existing_field = base[0].clone();
let ops = vec![OverrideOp::Append(existing_field)];
let result = apply_override(base, &ops);
assert!(result.is_err(), "Appending duplicate field should fail");
}
#[test]
fn test_set_is_idempotent_property(
base in valid_field_list(),
field in valid_field()
) {
let ops = vec![OverrideOp::Set(field)];
assert!(is_idempotent(&base, &ops));
}
#[test]
fn test_remove_is_idempotent_property(
base in valid_field_list(),
name in valid_ident()
) {
let ops = vec![OverrideOp::Remove(name)];
assert!(is_idempotent(&base, &ops));
}
#[test]
fn test_multiple_sets_last_wins(
base in valid_field_list(),
name in valid_ident(),
val1 in 0i64..100,
val2 in 100i64..200
) {
// val1 and val2 are drawn from disjoint ranges, so they always differ.
let field1 = Field {
name: name.clone(),
value: Value::Int(val1),
span: Span::new(0, 10),
};
let field2 = Field {
name: name.clone(),
value: Value::Int(val2),
span: Span::new(0, 10),
};
let ops = vec![
OverrideOp::Set(field1),
OverrideOp::Set(field2.clone()),
];
let result = apply_override(base, &ops).unwrap();
let final_field = result.iter().find(|f| f.name == name).unwrap();
// Last set should win
assert_eq!(final_field.value, field2.value);
}
#[test]
fn test_set_after_remove_adds_back(
base in valid_field_list().prop_filter("non-empty", |f| !f.is_empty())
) {
let field = base[0].clone();
let name = field.name.clone();
let ops = vec![
OverrideOp::Remove(name.clone()),
OverrideOp::Set(field.clone()),
];
let result = apply_override(base, &ops).unwrap();
// Field should exist after remove + set
assert!(result.iter().any(|f| f.name == name));
}
#[test]
fn test_remove_after_set_cancels_out(
base in valid_field_list(),
field in valid_field()
) {
let name = field.name.clone();
let ops = vec![
OverrideOp::Set(field),
OverrideOp::Remove(name.clone()),
];
let result = apply_override(base, &ops).unwrap();
// Field should not exist after set + remove
assert!(!result.iter().any(|f| f.name == name));
}
#[test]
fn test_operation_order_matters(
base in valid_field_list(),
name in valid_ident(),
val1 in 0i64..100,
val2 in 100i64..200
) {
let field1 = Field {
name: name.clone(),
value: Value::Int(val1),
span: Span::new(0, 10),
};
let field2 = Field {
name: name.clone(),
value: Value::Int(val2),
span: Span::new(0, 10),
};
let ops1 = vec![
OverrideOp::Set(field1.clone()),
OverrideOp::Set(field2.clone()),
];
let ops2 = vec![
OverrideOp::Set(field2.clone()),
OverrideOp::Set(field1.clone()),
];
let result1 = apply_override(base.clone(), &ops1).unwrap();
let result2 = apply_override(base, &ops2).unwrap();
// Different order should give different results (last set wins)
let value1 = result1.iter().find(|f| f.name == name).unwrap().value.clone();
let value2 = result2.iter().find(|f| f.name == name).unwrap().value.clone();
assert_eq!(value1, Value::Int(val2));
assert_eq!(value2, Value::Int(val1));
}
#[test]
fn test_append_after_remove_succeeds(
base in valid_field_list().prop_filter("non-empty", |f| !f.is_empty())
) {
let field = base[0].clone();
let name = field.name.clone();
let ops = vec![
OverrideOp::Remove(name.clone()),
OverrideOp::Append(field.clone()),
];
let result = apply_override(base, &ops);
// Should succeed - field was removed then appended
assert!(result.is_ok());
let fields = result.unwrap();
assert!(fields.iter().any(|f| f.name == name));
}
#[test]
fn test_associativity_for_sets(
base in valid_field_list(),
f1 in valid_field(),
f2 in valid_field(),
f3 in valid_field()
) {
// Skip if any fields have the same name
if f1.name == f2.name || f2.name == f3.name || f1.name == f3.name {
return Ok(());
}
// (f1, f2), f3
let ops1 = vec![
OverrideOp::Set(f1.clone()),
OverrideOp::Set(f2.clone()),
];
let intermediate = apply_override(base.clone(), &ops1).unwrap();
let result1 = apply_override(intermediate, &[OverrideOp::Set(f3.clone())]).unwrap();
// f1, (f2, f3)
let ops2 = vec![
OverrideOp::Set(f2.clone()),
OverrideOp::Set(f3.clone()),
];
let intermediate = apply_override(base.clone(), &ops2).unwrap();
let result2 = apply_override(intermediate, &[OverrideOp::Set(f1.clone())]).unwrap();
// Results might differ due to field order, but should have same fields
assert_eq!(result1.len(), result2.len());
for field in &result1 {
assert!(result2.iter().any(|f| f.name == field.name));
}
}
}

228
src/resolve/mod.rs Normal file
View File

@@ -0,0 +1,228 @@
//! Resolution engine for Storybook DSL
//!
//! This module takes parsed AST and:
//! - Builds a name table mapping qualified paths to declarations
//! - Resolves cross-references between entities
//! - Handles `use` statements
//! - Validates semantic constraints
//! - Produces rich error diagnostics
#![allow(unused_assignments)] // False positives in error enum fields used by thiserror
pub mod convert;
pub mod links;
pub mod merge;
pub mod names;
pub mod types;
pub mod validate;
#[cfg(test)]
mod prop_tests;
#[cfg(test)]
mod integration_tests;
#[cfg(test)]
#[path = "links_prop_tests.rs"]
mod links_prop_tests;
#[cfg(test)]
#[path = "merge_prop_tests.rs"]
mod merge_prop_tests;
#[cfg(test)]
#[path = "validate_prop_tests.rs"]
mod validate_prop_tests;
#[cfg(test)]
#[path = "convert_prop_tests.rs"]
mod convert_prop_tests;
#[cfg(test)]
#[path = "convert_integration_tests.rs"]
mod convert_integration_tests;
use miette::Diagnostic;
pub use names::{
NameTable,
QualifiedPath,
};
use thiserror::Error;
pub use types::ResolvedFile;
use crate::syntax::ast::File;
/// Errors that can occur during resolution
///
/// Error text comes from the `thiserror` `#[error]` attributes; rich
/// diagnostics (help text, labels, related errors) come from the `miette`
/// `#[diagnostic]`, `#[help]`, `#[label]`, and `#[related]` attributes.
#[derive(Error, Debug, Diagnostic)]
pub enum ResolveError {
/// A referenced name was not found; `suggestion` may carry a fuzzy match.
#[error("Name '{name}' not found")]
NameNotFound {
// NOTE(review): these per-field allows look redundant given the
// file-level #![allow(unused_assignments)]; presumably they silence
// derive-macro false positives — confirm before removing.
#[allow(dead_code)]
#[allow(unused_assignments)]
name: String,
#[help]
suggestion: Option<String>,
},
/// The same name was declared twice; both spans are labeled.
#[error("Duplicate definition of '{name}'")]
#[diagnostic(help("Each name must be unique within a file. You've defined '{name}' more than once. Either rename one of them or remove the duplicate definition. If you meant to extend or override the first definition, use template inheritance instead."))]
DuplicateDefinition {
#[allow(dead_code)]
#[allow(unused_assignments)]
name: String,
#[label("first defined here")]
first_location: (usize, usize),
#[label("redefined here")]
second_location: (usize, usize),
},
/// A cycle was found (e.g. template includes); `cycle` describes it.
#[error("Circular dependency detected")]
CircularDependency {
#[help]
cycle: String,
},
#[error("Invalid field access: {message}")]
#[diagnostic(help("You're trying to access a field that doesn't exist or isn't accessible in this context. Check the field name for typos and make sure it's defined on the object you're accessing. In relationships, use 'self' for your own fields and 'other' for the other participant's fields."))]
InvalidFieldAccess { message: String },
#[error("Type mismatch: {message}")]
#[diagnostic(help("The value you provided doesn't match the expected type. For example, you might be using a number where text is expected, or vice versa. Check that your value matches what's expected: numbers for ages, decimals (0.0-1.0) for traits, strings in quotes for text, true/false for yes/no values."))]
TypeMismatch { message: String },
/// General-purpose semantic error with an optional per-site help string.
#[error("Validation error: {message}")]
ValidationError {
message: String,
#[help]
help: Option<String>,
},
#[error("Life arc '{life_arc}': state '{state}' has transition to unknown state '{target}'")]
#[diagnostic(help("Make sure the target state '{target}' is defined in the same life_arc block. Available states: {available_states}"))]
UnknownLifeArcState {
life_arc: String,
state: String,
target: String,
available_states: String,
},
#[error("Trait '{field}' has value {value} which is outside valid range [{min}, {max}]")]
#[diagnostic(help("Normalized traits like '{field}' must have values between {min} and {max}. Try adjusting the value to fit within this range."))]
TraitOutOfRange {
field: String,
value: String,
min: f64,
max: f64,
},
#[error("Schedule blocks '{block1}' and '{block2}' overlap")]
#[diagnostic(help("Schedule blocks cannot overlap in time. Make sure each block ends before the next one begins. Block '{block1}' ends at {end1}, but '{block2}' starts at {start2}."))]
ScheduleOverlap {
block1: String,
block2: String,
end1: String,
start2: String,
},
#[error("Behavior tree '{tree}': unknown action '{action}'")]
#[diagnostic(help("The action '{action}' is not defined in the action registry. Make sure to define all custom actions, or check for typos."))]
UnknownBehaviorAction { tree: String, action: String },
/// Aggregate used by `ErrorCollector` to report all errors at once.
#[error("Found {count} errors")]
#[diagnostic(help("Multiple errors were found during validation. Each error is listed below with its own help message. Fix them one by one, or use the hints to address common patterns."))]
MultipleErrors {
count: usize,
#[related]
errors: Vec<ResolveError>,
},
}
/// Result type for resolution operations
pub type Result<T> = std::result::Result<T, ResolveError>;
/// Collects multiple errors instead of failing fast
///
/// This allows the compiler to continue validating and report all errors at
/// once, giving users a complete picture of what needs to be fixed.
#[derive(Debug, Default)]
pub struct ErrorCollector {
errors: Vec<ResolveError>,
}
impl ErrorCollector {
/// Create a new error collector
pub fn new() -> Self {
Self { errors: Vec::new() }
}
/// Add an error to the collection
pub fn add(&mut self, error: ResolveError) {
// Don't add MultipleErrors to avoid nesting
match error {
| ResolveError::MultipleErrors { mut errors, .. } => {
self.errors.append(&mut errors);
},
| _ => {
self.errors.push(error);
},
}
}
/// Check if any errors were collected
pub fn has_errors(&self) -> bool {
!self.errors.is_empty()
}
/// Get the number of errors collected
pub fn len(&self) -> usize {
self.errors.len()
}
/// Check if no errors have been collected
pub fn is_empty(&self) -> bool {
self.errors.is_empty()
}
/// Convert to a Result, returning Ok if no errors or Err with all errors
pub fn into_result<T>(self, ok_value: T) -> Result<T> {
if self.errors.is_empty() {
Ok(ok_value)
} else if self.errors.len() == 1 {
// Single error - return it directly
Err(self.errors.into_iter().next().unwrap())
} else {
// Multiple errors - wrap them
Err(ResolveError::MultipleErrors {
count: self.errors.len(),
errors: self.errors,
})
}
}
/// Add a Result to the collector, extracting any error
pub fn add_result<T>(&mut self, result: Result<T>) -> Option<T> {
match result {
| Ok(value) => Some(value),
| Err(e) => {
self.add(e);
None
},
}
}
}
/// Resolve a parsed file into a validated, cross-referenced structure
///
/// NOTE(review): phases 2 and 3 are still TODO. Currently this only builds
/// the name table (propagating any error from `NameTable::from_file`) and
/// then returns an empty `ResolvedFile`, discarding the file's declarations.
pub fn resolve_file(file: File) -> Result<ResolvedFile> {
// Phase 1: Build name table
let _name_table = NameTable::from_file(&file)?;
// Phase 2: Resolve cross-references
// TODO: implement
// Phase 3: Validate semantics
// TODO: implement
// For now, just return a placeholder
Ok(ResolvedFile {
declarations: vec![],
})
}

581
src/resolve/names.rs Normal file
View File

@@ -0,0 +1,581 @@
//! Name resolution and qualified paths
//!
//! This module handles:
//! - Building a name table from parsed AST
//! - Mapping qualified paths to declarations
//! - Resolving `use` statements
//! - Fuzzy matching for suggestions
use indexmap::IndexMap;
use strsim::jaro_winkler;
use crate::{
resolve::{
ResolveError,
Result,
},
syntax::ast::{
Declaration,
File,
Span,
},
};
/// A qualified path like `characters::Martha` or `behaviors::WorkAtBakery`
pub type QualifiedPath = Vec<String>;
/// Kind of declaration
///
/// Lets a `NameEntry` record which AST variant a registered name refers to
/// without holding a reference to the declaration itself.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum DeclKind {
Character,
Template,
LifeArc,
Schedule,
Behavior,
Institution,
Relationship,
Location,
Species,
Enum,
}
/// Entry in the name table
#[derive(Debug, Clone)]
pub struct NameEntry {
/// Which kind of declaration this name refers to
pub kind: DeclKind,
/// Full path under which the name was registered
pub qualified_path: QualifiedPath,
/// Span of the declaration in the source file
pub span: Span,
/// Index into the original declarations vector
pub decl_index: usize,
}
/// Name table mapping qualified paths to declarations
#[derive(Debug, Clone)]
pub struct NameTable {
    /// Map from qualified path to entry; insertion order is preserved.
    entries: IndexMap<QualifiedPath, NameEntry>,
    /// `use` statements collected from the file, in declaration order.
    imports: Vec<Import>,
}
/// An import statement
#[derive(Debug, Clone)]
pub struct Import {
    /// Path written after `use` (for single imports this includes the name).
    pub source_path: QualifiedPath,
    /// Which syntactic form of import this was.
    pub kind: ImportKind,
}
/// Shape of a `use` statement.
#[derive(Debug, Clone)]
pub enum ImportKind {
    /// `use foo::bar` - imports a single name.
    Single(String),
    /// `use foo::{bar, baz}` - imports several names at once.
    Grouped(Vec<String>),
    /// `use foo::*` - imports everything under a path.
    Wildcard,
}
impl NameTable {
    /// Create a new empty name table
    pub fn new() -> Self {
        Self {
            entries: IndexMap::new(),
            imports: Vec::new(),
        }
    }
    /// Build a name table from a parsed file
    ///
    /// Two passes: the first collects `use` statements into `imports`,
    /// the second registers every non-`use` declaration under a
    /// single-component qualified path. Returns
    /// `ResolveError::DuplicateDefinition` on the first name declared twice.
    pub fn from_file(file: &File) -> Result<Self> {
        let mut table = Self::new();
        // First pass: collect imports
        for decl in &file.declarations {
            if let Declaration::Use(use_decl) = decl {
                let import = Import {
                    source_path: use_decl.path.clone(),
                    kind: match &use_decl.kind {
                        | crate::syntax::ast::UseKind::Single => {
                            // For single import, the last component is the name
                            let name = use_decl
                                .path
                                .last()
                                .ok_or_else(|| ResolveError::ValidationError {
                                    message: "Empty use path".to_string(),
                                    help: Some("Use statements must have at least one path component. For example: 'use characters::Martha;'".to_string()),
                                })?
                                .clone();
                            ImportKind::Single(name)
                        },
                        | crate::syntax::ast::UseKind::Grouped(names) => {
                            ImportKind::Grouped(names.clone())
                        },
                        | crate::syntax::ast::UseKind::Wildcard => ImportKind::Wildcard,
                    },
                };
                table.imports.push(import);
            }
        }
        // Second pass: register declarations
        for (index, decl) in file.declarations.iter().enumerate() {
            let (name, kind, span) = match decl {
                | Declaration::Use(_) => continue,
                | Declaration::Character(c) => {
                    (c.name.clone(), DeclKind::Character, c.span.clone())
                },
                | Declaration::Template(t) => (t.name.clone(), DeclKind::Template, t.span.clone()),
                | Declaration::LifeArc(l) => (l.name.clone(), DeclKind::LifeArc, l.span.clone()),
                | Declaration::Schedule(s) => (s.name.clone(), DeclKind::Schedule, s.span.clone()),
                | Declaration::Behavior(b) => (b.name.clone(), DeclKind::Behavior, b.span.clone()),
                | Declaration::Institution(i) => {
                    (i.name.clone(), DeclKind::Institution, i.span.clone())
                },
                | Declaration::Relationship(r) => {
                    (r.name.clone(), DeclKind::Relationship, r.span.clone())
                },
                | Declaration::Location(l) => (l.name.clone(), DeclKind::Location, l.span.clone()),
                | Declaration::Species(s) => (s.name.clone(), DeclKind::Species, s.span.clone()),
                | Declaration::Enum(e) => (e.name.clone(), DeclKind::Enum, e.span.clone()),
            };
            // For now, qualified path is just the name
            // In a multi-file system, this would include directory structure
            let qualified_path = vec![name.clone()];
            // Check for duplicates
            if let Some(existing) = table.entries.get(&qualified_path) {
                return Err(ResolveError::DuplicateDefinition {
                    name,
                    first_location: (existing.span.start, existing.span.end),
                    second_location: (span.start, span.end),
                });
            }
            table.entries.insert(
                qualified_path.clone(),
                NameEntry {
                    kind,
                    qualified_path,
                    span,
                    decl_index: index,
                },
            );
        }
        Ok(table)
    }
    /// Look up a name in the table
    ///
    /// `path` must match a registered qualified path exactly.
    pub fn lookup(&self, path: &[String]) -> Option<&NameEntry> {
        self.entries.get(path)
    }
    /// Find the best fuzzy match for a name
    ///
    /// Uses Jaro-Winkler similarity against the last component of each
    /// registered path; only candidates scoring above 0.8 are suggested.
    pub fn find_suggestion(&self, name: &str) -> Option<String> {
        let mut best_match: Option<(String, f64)> = None;
        for path in self.entries.keys() {
            // Compare against the last component of the path
            // NOTE(review): the `?` below aborts the entire search if any
            // key is an empty path. `from_file` only builds one-component
            // paths, so this is unreachable today - confirm if that
            // invariant ever changes (a `continue` would be safer).
            let component = path.last()?;
            let score = jaro_winkler(name, component);
            if score > 0.8 {
                match &best_match {
                    | None => best_match = Some((component.clone(), score)),
                    | Some((_, best_score)) if score > *best_score => {
                        best_match = Some((component.clone(), score));
                    },
                    | _ => {},
                }
            }
        }
        best_match.map(|(name, _)| name)
    }
    /// Get all entries of a specific kind
    pub fn entries_of_kind(&self, kind: DeclKind) -> impl Iterator<Item = &NameEntry> {
        self.entries
            .values()
            .filter(move |entry| entry.kind == kind)
    }
    /// Get all entries
    pub fn all_entries(&self) -> impl Iterator<Item = &NameEntry> {
        self.entries.values()
    }
    /// Resolve a name, considering imports
    ///
    /// This tries to resolve a simple name like "Martha" by:
    /// 1. Looking in the local table
    /// 2. Checking if it's explicitly imported
    /// 3. Checking if it matches a wildcard import
    pub fn resolve_name(&self, name: &str) -> Option<&NameEntry> {
        // First try direct lookup (fully qualified or local)
        if let Some(entry) = self.lookup(&[name.to_string()]) {
            return Some(entry);
        }
        // Then try imports
        for import in &self.imports {
            match &import.kind {
                | ImportKind::Single(imported_name) if imported_name == name => {
                    // Found in single import
                    // NOTE(review): this looks up the full multi-component
                    // source path, but `from_file` registers entries under
                    // single-component paths, so the lookup only succeeds
                    // when the source path itself was registered - confirm
                    // this is the intended single-file semantics.
                    return self.lookup(&import.source_path);
                },
                | ImportKind::Grouped(names) if names.contains(&name.to_string()) => {
                    // Found in grouped import
                    let mut path = import.source_path.clone();
                    path.push(name.to_string());
                    return self.lookup(&path);
                },
                | ImportKind::Wildcard => {
                    // Try appending the name to the import path
                    let mut path = import.source_path.clone();
                    path.push(name.to_string());
                    if let Some(entry) = self.lookup(&path) {
                        return Some(entry);
                    }
                },
                | _ => {},
            }
        }
        None
    }
    /// Get all imports
    pub fn imports(&self) -> &[Import] {
        &self.imports
    }
    /// Merge another name table into this one
    ///
    /// This is used to combine name tables from multiple files into a single
    /// project-wide table. Returns an error if there are duplicate definitions.
    pub fn merge(&mut self, other: NameTable) -> Result<()> {
        // Merge imports
        self.imports.extend(other.imports);
        // Merge entries, checking for duplicates
        for (path, entry) in other.entries {
            if let Some(existing) = self.entries.get(&path) {
                // Found a duplicate - extract the name from the path
                let name = path.last().unwrap_or(&String::new()).clone();
                return Err(ResolveError::DuplicateDefinition {
                    name,
                    first_location: (existing.span.start, existing.span.end),
                    second_location: (entry.span.start, entry.span.end),
                });
            }
            self.entries.insert(path, entry);
        }
        Ok(())
    }
    /// Build a combined name table from multiple files
    ///
    /// This creates name tables for each file and merges them together,
    /// detecting any duplicate definitions across files.
    pub fn from_files(files: &[File]) -> Result<Self> {
        let mut combined = NameTable::new();
        for file in files {
            let table = NameTable::from_file(file)?;
            combined.merge(table)?;
        }
        Ok(combined)
    }
}
impl Default for NameTable {
fn default() -> Self {
Self::new()
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::syntax::ast::{
        Character,
        Template,
    };
    // Registered names resolve via `lookup`; unregistered ones do not.
    #[test]
    fn test_name_table_basic() {
        let file = File {
            declarations: vec![
                Declaration::Character(Character {
                    name: "Martha".to_string(),
                    fields: vec![],
                    template: None,
                    span: Span::new(0, 10),
                }),
                Declaration::Template(Template {
                    name: "Person".to_string(),
                    fields: vec![],
                    strict: false,
                    includes: vec![],
                    span: Span::new(20, 30),
                }),
            ],
        };
        let table = NameTable::from_file(&file).unwrap();
        assert!(table.lookup(&["Martha".to_string()]).is_some());
        assert!(table.lookup(&["Person".to_string()]).is_some());
        assert!(table.lookup(&["Unknown".to_string()]).is_none());
    }
    // Two declarations with the same name in one file must be rejected.
    #[test]
    fn test_duplicate_detection() {
        let file = File {
            declarations: vec![
                Declaration::Character(Character {
                    name: "Martha".to_string(),
                    fields: vec![],
                    template: None,
                    span: Span::new(0, 10),
                }),
                Declaration::Character(Character {
                    name: "Martha".to_string(),
                    fields: vec![],
                    template: None,
                    span: Span::new(20, 30),
                }),
            ],
        };
        let result = NameTable::from_file(&file);
        assert!(result.is_err());
    }
    // A transposed-letter typo should produce the original as a suggestion.
    #[test]
    fn test_fuzzy_matching() {
        let file = File {
            declarations: vec![Declaration::Character(Character {
                name: "Martha".to_string(),
                fields: vec![],
                template: None,
                span: Span::new(0, 10),
            })],
        };
        let table = NameTable::from_file(&file).unwrap();
        // "Marhta" (typo) should suggest "Martha"
        let suggestion = table.find_suggestion("Marhta");
        assert_eq!(suggestion, Some("Martha".to_string()));
    }
    // `use characters::Martha;` is recorded as a Single import of "Martha".
    #[test]
    fn test_single_import() {
        use crate::syntax::ast::{
            UseDecl,
            UseKind,
        };
        let file = File {
            declarations: vec![
                Declaration::Use(UseDecl {
                    path: vec!["characters".to_string(), "Martha".to_string()],
                    kind: UseKind::Single,
                    span: Span::new(0, 10),
                }),
                Declaration::Character(Character {
                    name: "characters".to_string(),
                    fields: vec![],
                    template: None,
                    span: Span::new(20, 30),
                }),
            ],
        };
        let table = NameTable::from_file(&file).unwrap();
        assert_eq!(table.imports.len(), 1);
        match &table.imports[0].kind {
            | ImportKind::Single(name) => assert_eq!(name, "Martha"),
            | _ => panic!("Expected single import"),
        }
    }
    // `use characters::{Martha, David};` keeps both grouped names.
    #[test]
    fn test_grouped_import() {
        use crate::syntax::ast::{
            UseDecl,
            UseKind,
        };
        let file = File {
            declarations: vec![Declaration::Use(UseDecl {
                path: vec!["characters".to_string()],
                kind: UseKind::Grouped(vec!["Martha".to_string(), "David".to_string()]),
                span: Span::new(0, 10),
            })],
        };
        let table = NameTable::from_file(&file).unwrap();
        assert_eq!(table.imports.len(), 1);
        match &table.imports[0].kind {
            | ImportKind::Grouped(names) => {
                assert_eq!(names.len(), 2);
                assert!(names.contains(&"Martha".to_string()));
                assert!(names.contains(&"David".to_string()));
            },
            | _ => panic!("Expected grouped import"),
        }
    }
    // `use characters::*;` is recorded as a Wildcard import.
    #[test]
    fn test_wildcard_import() {
        use crate::syntax::ast::{
            UseDecl,
            UseKind,
        };
        let file = File {
            declarations: vec![Declaration::Use(UseDecl {
                path: vec!["characters".to_string()],
                kind: UseKind::Wildcard,
                span: Span::new(0, 10),
            })],
        };
        let table = NameTable::from_file(&file).unwrap();
        assert_eq!(table.imports.len(), 1);
        match &table.imports[0].kind {
            | ImportKind::Wildcard => {},
            | _ => panic!("Expected wildcard import"),
        }
    }
    // Merging disjoint tables keeps entries from both sides.
    #[test]
    fn test_merge_tables() {
        let file1 = File {
            declarations: vec![Declaration::Character(Character {
                name: "Martha".to_string(),
                fields: vec![],
                template: None,
                span: Span::new(0, 10),
            })],
        };
        let file2 = File {
            declarations: vec![Declaration::Character(Character {
                name: "David".to_string(),
                fields: vec![],
                template: None,
                span: Span::new(0, 10),
            })],
        };
        let mut table1 = NameTable::from_file(&file1).unwrap();
        let table2 = NameTable::from_file(&file2).unwrap();
        table1.merge(table2).unwrap();
        // Both names should be in the merged table
        assert!(table1.lookup(&["Martha".to_string()]).is_some());
        assert!(table1.lookup(&["David".to_string()]).is_some());
    }
    // Merging tables with a shared name is a duplicate-definition error.
    #[test]
    fn test_merge_detects_duplicates() {
        let file1 = File {
            declarations: vec![Declaration::Character(Character {
                name: "Martha".to_string(),
                fields: vec![],
                template: None,
                span: Span::new(0, 10),
            })],
        };
        let file2 = File {
            declarations: vec![Declaration::Character(Character {
                name: "Martha".to_string(),
                fields: vec![],
                template: None,
                span: Span::new(20, 30),
            })],
        };
        let mut table1 = NameTable::from_file(&file1).unwrap();
        let table2 = NameTable::from_file(&file2).unwrap();
        let result = table1.merge(table2);
        assert!(result.is_err());
    }
    // `from_files` combines declarations across multiple files.
    #[test]
    fn test_from_files() {
        let files = vec![
            File {
                declarations: vec![Declaration::Character(Character {
                    name: "Martha".to_string(),
                    fields: vec![],
                    template: None,
                    span: Span::new(0, 10),
                })],
            },
            File {
                declarations: vec![Declaration::Character(Character {
                    name: "David".to_string(),
                    fields: vec![],
                    template: None,
                    span: Span::new(0, 10),
                })],
            },
        ];
        let table = NameTable::from_files(&files).unwrap();
        assert!(table.lookup(&["Martha".to_string()]).is_some());
        assert!(table.lookup(&["David".to_string()]).is_some());
    }
    // Cross-file duplicates are detected by `from_files` as well.
    #[test]
    fn test_from_files_detects_duplicates() {
        let files = vec![
            File {
                declarations: vec![Declaration::Character(Character {
                    name: "Martha".to_string(),
                    fields: vec![],
                    template: None,
                    span: Span::new(0, 10),
                })],
            },
            File {
                declarations: vec![Declaration::Character(Character {
                    name: "Martha".to_string(),
                    fields: vec![],
                    template: None,
                    span: Span::new(20, 30),
                })],
            },
        ];
        let result = NameTable::from_files(&files);
        assert!(result.is_err());
    }
}

303
src/resolve/prop_tests.rs Normal file
View File

@@ -0,0 +1,303 @@
//! Property-based tests for the resolution engine
use proptest::prelude::*;
use crate::{
resolve::names::{
DeclKind,
NameTable,
},
syntax::ast::*,
};
// ===== Generators =====
/// Strategy producing syntactically valid, non-keyword identifiers.
fn valid_ident() -> impl Strategy<Value = String> {
    // Identifiers the DSL treats as keywords; generated names must avoid
    // them so the resulting AST would be parseable.
    const KEYWORDS: &[&str] = &[
        "use",
        "character",
        "template",
        "life_arc",
        "schedule",
        "behavior",
        "institution",
        "relationship",
        "location",
        "species",
        "enum",
        "state",
        "on",
        "as",
        "self",
        "other",
        "remove",
        "append",
        "forall",
        "exists",
        "in",
        "where",
        "and",
        "or",
        "not",
        "is",
        "true",
        "false",
    ];
    "[a-zA-Z_][a-zA-Z0-9_]{0,20}"
        .prop_filter("not a keyword", |s| !KEYWORDS.contains(&s.as_str()))
}
/// Strategy producing a minimal character declaration paired with its name.
fn valid_character_decl() -> impl Strategy<Value = (String, Declaration)> {
    valid_ident().prop_map(|name| {
        (
            name.clone(),
            Declaration::Character(Character {
                name,
                fields: vec![],
                template: None,
                span: Span::new(0, 10),
            }),
        )
    })
}
/// Strategy producing a minimal non-strict template paired with its name.
fn valid_template_decl() -> impl Strategy<Value = (String, Declaration)> {
    valid_ident().prop_map(|name| {
        (
            name.clone(),
            Declaration::Template(Template {
                name,
                fields: vec![],
                strict: false,
                includes: vec![],
                span: Span::new(0, 10),
            }),
        )
    })
}
/// Strategy producing an enum with 1-4 generated variants, paired with its
/// name.
fn valid_enum_decl() -> impl Strategy<Value = (String, Declaration)> {
    let parts = (valid_ident(), prop::collection::vec(valid_ident(), 1..5));
    parts.prop_map(|(name, variants)| {
        (
            name.clone(),
            Declaration::Enum(EnumDecl {
                name,
                variants,
                span: Span::new(0, 10),
            }),
        )
    })
}
/// Strategy producing a two-component single import: `use module::name;`.
fn valid_use_single() -> impl Strategy<Value = Declaration> {
    (valid_ident(), valid_ident()).prop_map(|(module, name)| {
        let decl = UseDecl {
            path: vec![module, name],
            kind: UseKind::Single,
            span: Span::new(0, 10),
        };
        Declaration::Use(decl)
    })
}
/// Strategy producing a grouped import with 1-4 names:
/// `use module::{a, b};`.
fn valid_use_grouped() -> impl Strategy<Value = Declaration> {
    (valid_ident(), prop::collection::vec(valid_ident(), 1..5)).prop_map(|(module, names)| {
        let decl = UseDecl {
            path: vec![module],
            kind: UseKind::Grouped(names),
            span: Span::new(0, 10),
        };
        Declaration::Use(decl)
    })
}
/// Strategy producing a wildcard import: `use module::*;`.
fn valid_use_wildcard() -> impl Strategy<Value = Declaration> {
    valid_ident().prop_map(|module| {
        let decl = UseDecl {
            path: vec![module],
            kind: UseKind::Wildcard,
            span: Span::new(0, 10),
        };
        Declaration::Use(decl)
    })
}
// ===== Property Tests =====
proptest! {
    // Every generated declaration must be findable afterwards; a generated
    // duplicate must instead make construction fail.
    #[test]
    fn test_name_table_registers_all_declarations(
        chars in prop::collection::vec(valid_character_decl(), 0..10)
    ) {
        let declarations: Vec<_> = chars.iter().map(|(_, decl)| decl.clone()).collect();
        let file = File { declarations };
        let result = NameTable::from_file(&file);
        if chars.is_empty() {
            // Empty file should succeed
            assert!(result.is_ok());
        } else {
            // Check if there are duplicates
            let mut seen = std::collections::HashSet::new();
            let has_duplicates = chars.iter().any(|(name, _)| !seen.insert(name));
            if has_duplicates {
                // Should fail with duplicate error
                assert!(result.is_err());
            } else {
                // Should succeed and all names should be registered
                let table = result.unwrap();
                for (name, _) in &chars {
                    assert!(table.lookup(std::slice::from_ref(name)).is_some(),
                        "Name '{}' should be registered", name);
                }
            }
        }
    }
    // Repeating one name 2-4 times must always be a duplicate error.
    #[test]
    fn test_duplicate_detection_always_fails(
        name in valid_ident(),
        count in 2usize..5
    ) {
        let declarations: Vec<_> = (0..count).map(|i| {
            Declaration::Character(Character {
                name: name.clone(),
                fields: vec![],
                template: None,
                span: Span::new(i * 10, i * 10 + 10),
            })
        }).collect();
        let file = File { declarations };
        let result = NameTable::from_file(&file);
        // Should always fail with duplicate error
        assert!(result.is_err());
    }
    // Lookup must distinguish names that differ only by case.
    #[test]
    fn test_lookup_is_case_sensitive(name in valid_ident()) {
        let file = File {
            declarations: vec![
                Declaration::Character(Character {
                    name: name.clone(),
                    fields: vec![],
                    template: None,
                    span: Span::new(0, 10),
                }),
            ],
        };
        let table = NameTable::from_file(&file).unwrap();
        // Original name should be found
        assert!(table.lookup(std::slice::from_ref(&name)).is_some());
        // Different case should not be found
        let uppercase = name.to_uppercase();
        if uppercase != name {
            assert!(table.lookup(&[uppercase]).is_none());
        }
    }
    // `entries_of_kind` counts must match the number generated per kind.
    #[test]
    fn test_kind_filtering_works(
        chars in prop::collection::vec(valid_character_decl(), 0..5),
        templates in prop::collection::vec(valid_template_decl(), 0..5),
        enums in prop::collection::vec(valid_enum_decl(), 0..5)
    ) {
        let mut declarations = vec![];
        declarations.extend(chars.iter().map(|(_, d)| d.clone()));
        declarations.extend(templates.iter().map(|(_, d)| d.clone()));
        declarations.extend(enums.iter().map(|(_, d)| d.clone()));
        let file = File { declarations };
        // Only proceed if no duplicates
        let mut seen = std::collections::HashSet::new();
        let has_duplicates = chars.iter().any(|(name, _)| !seen.insert(name))
            || templates.iter().any(|(name, _)| !seen.insert(name))
            || enums.iter().any(|(name, _)| !seen.insert(name));
        if !has_duplicates {
            let table = NameTable::from_file(&file).unwrap();
            let char_count = table.entries_of_kind(DeclKind::Character).count();
            let template_count = table.entries_of_kind(DeclKind::Template).count();
            let enum_count = table.entries_of_kind(DeclKind::Enum).count();
            assert_eq!(char_count, chars.len());
            assert_eq!(template_count, templates.len());
            assert_eq!(enum_count, enums.len());
        }
    }
    // Every `use` statement of any shape must land in `imports`.
    #[test]
    fn test_use_statements_are_collected(
        uses in prop::collection::vec(
            prop_oneof![
                valid_use_single(),
                valid_use_grouped(),
                valid_use_wildcard(),
            ],
            0..10
        )
    ) {
        let file = File { declarations: uses.clone() };
        let table = NameTable::from_file(&file).unwrap();
        assert_eq!(table.imports().len(), uses.len());
    }
    // If a suggestion IS produced for a swapped-letter typo, it must be the
    // original name (short names may legitimately yield no suggestion).
    #[test]
    fn test_fuzzy_matching_finds_close_names(
        name in valid_ident().prop_filter("long enough", |s| s.len() > 3)
    ) {
        let file = File {
            declarations: vec![
                Declaration::Character(Character {
                    name: name.clone(),
                    fields: vec![],
                    template: None,
                    span: Span::new(0, 10),
                }),
            ],
        };
        let table = NameTable::from_file(&file).unwrap();
        // Create a typo by swapping two adjacent characters
        if name.len() >= 2 {
            let mut chars: Vec<char> = name.chars().collect();
            chars.swap(0, 1);
            let typo: String = chars.into_iter().collect();
            // Should suggest the original name
            if let Some(suggestion) = table.find_suggestion(&typo) {
                assert_eq!(suggestion, name);
            }
        }
    }
    // Imports preceding declarations must not disturb registration.
    #[test]
    fn test_mixed_declarations_and_imports(
        chars in prop::collection::vec(valid_character_decl(), 1..5),
        uses in prop::collection::vec(valid_use_single(), 0..3)
    ) {
        let mut declarations = uses;
        declarations.extend(chars.iter().map(|(_, d)| d.clone()));
        let file = File { declarations };
        // Check for duplicates
        let mut seen = std::collections::HashSet::new();
        let has_duplicates = chars.iter().any(|(name, _)| !seen.insert(name));
        if !has_duplicates {
            let table = NameTable::from_file(&file).unwrap();
            // All characters should be registered
            for (name, _) in &chars {
                assert!(table.lookup(std::slice::from_ref(name)).is_some());
            }
        }
    }
}

138
src/resolve/types.rs Normal file
View File

@@ -0,0 +1,138 @@
//! Resolved types - what consumers see after resolution
//!
//! These types are similar to AST types but represent fully resolved,
//! validated entities with all cross-references resolved.
use crate::syntax::ast::{
Field,
Span,
};
/// A fully resolved file with all cross-references resolved
#[derive(Debug, Clone)]
pub struct ResolvedFile {
    /// All declarations from the file, in their resolved form.
    pub declarations: Vec<ResolvedDeclaration>,
}
/// A resolved top-level declaration
///
/// One variant per declaration kind; `use` statements are consumed during
/// resolution and have no resolved counterpart.
#[derive(Debug, Clone)]
pub enum ResolvedDeclaration {
    Character(ResolvedCharacter),
    Template(ResolvedTemplate),
    LifeArc(ResolvedLifeArc),
    Schedule(ResolvedSchedule),
    Behavior(ResolvedBehavior),
    Institution(ResolvedInstitution),
    Relationship(ResolvedRelationship),
    Location(ResolvedLocation),
    Species(ResolvedSpecies),
    Enum(ResolvedEnum),
}
/// A resolved character with all template overrides applied
#[derive(Debug, Clone)]
pub struct ResolvedCharacter {
    pub name: String,
    /// Final field set after template composition.
    pub fields: Vec<Field>,
    pub span: Span,
    /// Qualified path (e.g., characters::Martha)
    pub qualified_path: Vec<String>,
}
/// A resolved template definition
#[derive(Debug, Clone)]
pub struct ResolvedTemplate {
    pub name: String,
    /// Final field set after includes are merged in.
    pub fields: Vec<Field>,
    pub span: Span,
    pub qualified_path: Vec<String>,
}
/// A resolved life arc state machine
#[derive(Debug, Clone)]
pub struct ResolvedLifeArc {
    pub name: String,
    /// All states; transitions index into this vector.
    pub states: Vec<ResolvedArcState>,
    pub span: Span,
    pub qualified_path: Vec<String>,
}
/// A single state within a resolved life arc.
#[derive(Debug, Clone)]
pub struct ResolvedArcState {
    pub name: String,
    /// Outgoing transitions; empty for terminal states.
    pub transitions: Vec<ResolvedTransition>,
    pub span: Span,
}
/// A state transition whose target has been verified to exist.
#[derive(Debug, Clone)]
pub struct ResolvedTransition {
    /// Name of the target state.
    pub to: String,
    /// Validated - target state exists
    pub target_state_index: usize,
    /// Guard expression that must hold for the transition to fire.
    pub condition: crate::syntax::ast::Expr,
    pub span: Span,
}
/// A resolved schedule
#[derive(Debug, Clone)]
pub struct ResolvedSchedule {
    pub name: String,
    /// Time blocks, validated to be non-overlapping.
    pub blocks: Vec<crate::syntax::ast::ScheduleBlock>,
    pub span: Span,
    pub qualified_path: Vec<String>,
}
/// A resolved behavior tree
#[derive(Debug, Clone)]
pub struct ResolvedBehavior {
    pub name: String,
    /// Root node of the behavior tree.
    pub root: crate::syntax::ast::BehaviorNode,
    pub span: Span,
    pub qualified_path: Vec<String>,
}
/// A resolved institution
#[derive(Debug, Clone)]
pub struct ResolvedInstitution {
    pub name: String,
    pub fields: Vec<Field>,
    pub span: Span,
    pub qualified_path: Vec<String>,
}
/// A resolved relationship with both sides validated
#[derive(Debug, Clone)]
pub struct ResolvedRelationship {
    pub name: String,
    /// The participating entities, including their self/other blocks.
    pub participants: Vec<crate::syntax::ast::Participant>,
    pub fields: Vec<Field>,
    pub span: Span,
    pub qualified_path: Vec<String>,
}
/// A resolved location
#[derive(Debug, Clone)]
pub struct ResolvedLocation {
    pub name: String,
    pub fields: Vec<Field>,
    pub span: Span,
    pub qualified_path: Vec<String>,
}
/// A resolved species
#[derive(Debug, Clone)]
pub struct ResolvedSpecies {
    pub name: String,
    pub fields: Vec<Field>,
    pub span: Span,
    pub qualified_path: Vec<String>,
}
/// A resolved enum
#[derive(Debug, Clone)]
pub struct ResolvedEnum {
    pub name: String,
    /// Variant names in declaration order.
    pub variants: Vec<String>,
    pub span: Span,
    pub qualified_path: Vec<String>,
}

501
src/resolve/validate.rs Normal file
View File

@@ -0,0 +1,501 @@
//! Semantic validation for Storybook entities
//!
//! This module validates semantic constraints that can't be checked during
//! parsing:
//! - Reserved keyword conflicts in field names
//! - Trait value ranges
//! - Schedule time overlaps
//! - Life arc transition validity
//! - Behavior tree action registry checks
//! - Relationship bond values (0.0 .. 1.0)
use std::collections::HashSet;
use crate::{
resolve::{
ErrorCollector,
ResolveError,
Result,
},
syntax::ast::*,
};
/// List of reserved keywords that cannot be used as field names
///
/// NOTE(review): this list must stay in sync with the lexer's keyword set.
/// It also contains `strict`, `include`, and `from`, which the
/// property-test identifier generator does not filter - confirm which list
/// is authoritative.
const RESERVED_KEYWORDS: &[&str] = &[
    // Top-level declaration keywords
    "character",
    "template",
    "life_arc",
    "schedule",
    "behavior",
    "institution",
    "relationship",
    "location",
    "species",
    "enum",
    // Statement keywords
    "use",
    "state",
    "on",
    "as",
    "remove",
    "append",
    "strict",
    "include",
    "from",
    // Expression keywords
    "self",
    "other",
    "forall",
    "exists",
    "in",
    "where",
    "and",
    "or",
    "not",
    "is",
    "true",
    "false",
];
/// Validate that field names don't conflict with reserved keywords
///
/// One `ValidationError` is reported per offending field, with a help
/// message suggesting alternative names.
pub fn validate_no_reserved_keywords(fields: &[Field], collector: &mut ErrorCollector) {
    for field in fields {
        // Non-reserved names are fine; skip them.
        if !RESERVED_KEYWORDS.contains(&field.name.as_str()) {
            continue;
        }
        // Capitalize the first character to build the `myName` suggestion.
        let mut rest = field.name.chars();
        let capitalized = match rest.next() {
            | Some(first) => first.to_uppercase().collect::<String>() + rest.as_str(),
            | None => 'x'.to_uppercase().collect::<String>(),
        };
        collector.add(ResolveError::ValidationError {
            message: format!(
                "Field name '{}' is a reserved keyword and cannot be used",
                field.name
            ),
            help: Some(format!(
                "The name '{}' is reserved by the Storybook language. Try using a different name like '{}Type', '{}Value', or 'my{}'.",
                field.name, field.name, field.name, capitalized
            )),
        });
    }
}
/// Validate trait values are within valid ranges
///
/// Normalized float traits (`bond`, `trust`, `love`, and any name ending
/// in `_normalized`) must lie in [0.0, 1.0]; integer `age` must lie in
/// [0, 150].
pub fn validate_trait_ranges(fields: &[Field], collector: &mut ErrorCollector) {
    // Float traits that must stay inside the normalized [0.0, 1.0] band.
    fn is_normalized_trait(name: &str) -> bool {
        name.ends_with("_normalized") || matches!(name, "bond" | "trust" | "love")
    }
    for field in fields {
        match &field.value {
            | Value::Float(f) if is_normalized_trait(&field.name) && !(0.0..=1.0).contains(f) => {
                collector.add(ResolveError::TraitOutOfRange {
                    field: field.name.clone(),
                    value: f.to_string(),
                    min: 0.0,
                    max: 1.0,
                });
            },
            | Value::Int(i) if field.name == "age" && !(0..=150).contains(i) => {
                // Ages outside a plausible span are rejected.
                collector.add(ResolveError::TraitOutOfRange {
                    field: "age".to_string(),
                    value: i.to_string(),
                    min: 0.0,
                    max: 150.0,
                });
            },
            | _ => {},
        }
    }
}
/// Validate relationship bond values are in [0.0, 1.0]
///
/// Also runs the generic trait-range checks over each participant's
/// `self`/`other` blocks when present.
pub fn validate_relationship_bonds(relationships: &[Relationship], collector: &mut ErrorCollector) {
    for rel in relationships {
        // Top-level `bond` fields must be normalized floats.
        for field in rel.fields.iter().filter(|f| f.name == "bond") {
            let Value::Float(f) = field.value else { continue };
            if (0.0..=1.0).contains(&f) {
                continue;
            }
            collector.add(ResolveError::TraitOutOfRange {
                field: "bond".to_string(),
                value: f.to_string(),
                min: 0.0,
                max: 1.0,
            });
        }
        // `self`/`other` blocks carry trait fields of their own; check each
        // block that is present.
        for participant in &rel.participants {
            for block in [&participant.self_block, &participant.other_block] {
                if let Some(fields) = block {
                    validate_trait_ranges(fields, collector);
                }
            }
        }
    }
}
/// Validate schedule blocks don't overlap in time
///
/// Blocks are compared pairwise after sorting by start time, so every
/// overlapping pair is reported in a stable order. Blocks that merely
/// touch (one ends exactly when the next starts) are allowed.
pub fn validate_schedule_overlaps(schedule: &Schedule, collector: &mut ErrorCollector) {
    // Order blocks by minutes-since-midnight of their start time.
    let mut ordered: Vec<_> = schedule.blocks.iter().collect();
    ordered.sort_by_key(|b| (b.start.hour as u32) * 60 + (b.start.minute as u32));
    for (i, first) in ordered.iter().enumerate() {
        let first_end = (first.end.hour as u32) * 60 + (first.end.minute as u32);
        for second in &ordered[i + 1..] {
            let second_start = (second.start.hour as u32) * 60 + (second.start.minute as u32);
            if first_end <= second_start {
                // `first` finishes before (or exactly when) `second` begins.
                continue;
            }
            collector.add(ResolveError::ScheduleOverlap {
                block1: format!(
                    "{} ({}:{:02}-{}:{:02})",
                    first.activity,
                    first.start.hour,
                    first.start.minute,
                    first.end.hour,
                    first.end.minute
                ),
                block2: format!(
                    "{} ({}:{:02}-{}:{:02})",
                    second.activity,
                    second.start.hour,
                    second.start.minute,
                    second.end.hour,
                    second.end.minute
                ),
                end1: format!("{}:{:02}", first.end.hour, first.end.minute),
                start2: format!("{}:{:02}", second.start.hour, second.start.minute),
            });
        }
    }
}
/// Validate life arc state machine has valid transitions
///
/// Every transition target must name a declared state; each dangling
/// target is reported as an `UnknownLifeArcState` whose suggestion list
/// enumerates the declared states in sorted (deterministic) order.
pub fn validate_life_arc_transitions(life_arc: &LifeArc, collector: &mut ErrorCollector) {
    // Collect all state names; borrow instead of cloning each one.
    let state_names: HashSet<&str> = life_arc
        .states
        .iter()
        .map(|state| state.name.as_str())
        .collect();
    // Validate all transitions point to valid states
    for state in &life_arc.states {
        for transition in &state.transitions {
            if state_names.contains(transition.to.as_str()) {
                continue;
            }
            // Sort the candidate list so diagnostics are deterministic:
            // iterating the HashSet directly would vary between runs.
            let mut candidates: Vec<&str> = state_names.iter().copied().collect();
            candidates.sort_unstable();
            let available_states = candidates
                .iter()
                .map(|s| format!("'{}'", s))
                .collect::<Vec<_>>()
                .join(", ");
            collector.add(ResolveError::UnknownLifeArcState {
                life_arc: life_arc.name.clone(),
                state: state.name.clone(),
                target: transition.to.clone(),
                available_states,
            });
        }
    }
    // States with no outgoing transitions are terminal; that is legal and
    // intentionally not reported.
}
/// Validate behavior tree actions are known
///
/// If action_registry is empty, skips validation (no schema provided).
pub fn validate_behavior_tree_actions(
    tree: &Behavior,
    action_registry: &HashSet<String>,
    collector: &mut ErrorCollector,
) {
    // An empty registry means "no action schema supplied" - nothing to
    // check against, so accept every action.
    if !action_registry.is_empty() {
        validate_tree_node_actions(&tree.root, action_registry, &tree.name, collector);
    }
}
/// Walk one behavior-tree node, reporting any action not present in the
/// registry; recurses through composite and decorator nodes.
fn validate_tree_node_actions(
    node: &BehaviorNode,
    action_registry: &HashSet<String>,
    tree_name: &str,
    collector: &mut ErrorCollector,
) {
    match node {
        | BehaviorNode::Action(name, _params) => {
            if !action_registry.contains(name) {
                collector.add(ResolveError::UnknownBehaviorAction {
                    tree: tree_name.to_string(),
                    action: name.clone(),
                });
            }
        },
        | BehaviorNode::Sequence(children) | BehaviorNode::Selector(children) => {
            // Composite nodes: recurse into every child.
            for child in children {
                validate_tree_node_actions(child, action_registry, tree_name, collector);
            }
        },
        | BehaviorNode::Decorator(_name, child) => {
            // Decorators wrap a single child.
            validate_tree_node_actions(child, action_registry, tree_name, collector);
        },
        // Conditions are validated via expression validation; subtree
        // references are resolved and checked elsewhere.
        | BehaviorNode::Condition(_) | BehaviorNode::SubTree(_) => {},
    }
}
/// Validate an entire file
///
/// Collects all validation errors and returns them together instead of failing
/// fast.
pub fn validate_file(file: &File, action_registry: &HashSet<String>) -> Result<()> {
    let mut collector = ErrorCollector::new();
    for decl in &file.declarations {
        // Dispatch each declaration to its semantic validator.
        match decl {
            | Declaration::Character(c) => validate_trait_ranges(&c.fields, &mut collector),
            | Declaration::Relationship(r) => {
                validate_relationship_bonds(std::slice::from_ref(r), &mut collector)
            },
            | Declaration::Schedule(s) => validate_schedule_overlaps(s, &mut collector),
            | Declaration::LifeArc(la) => validate_life_arc_transitions(la, &mut collector),
            | Declaration::Behavior(bt) => {
                validate_behavior_tree_actions(bt, action_registry, &mut collector)
            },
            // Remaining declaration kinds have no semantic checks yet.
            | _ => {},
        }
    }
    collector.into_result(())
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_valid_trait_ranges() {
let fields = vec![
Field {
name: "bond".to_string(),
value: Value::Float(0.8),
span: Span::new(0, 10),
},
Field {
name: "age".to_string(),
value: Value::Int(30),
span: Span::new(0, 10),
},
];
let mut collector = ErrorCollector::new();
validate_trait_ranges(&fields, &mut collector);
assert!(!collector.has_errors());
}
#[test]
fn test_invalid_bond_value_too_high() {
let fields = vec![Field {
name: "bond".to_string(),
value: Value::Float(1.5),
span: Span::new(0, 10),
}];
let mut collector = ErrorCollector::new();
validate_trait_ranges(&fields, &mut collector);
assert!(collector.has_errors());
}
#[test]
fn test_invalid_bond_value_negative() {
let fields = vec![Field {
name: "bond".to_string(),
value: Value::Float(-0.1),
span: Span::new(0, 10),
}];
let mut collector = ErrorCollector::new();
validate_trait_ranges(&fields, &mut collector);
assert!(collector.has_errors());
}
#[test]
fn test_invalid_age_negative() {
let fields = vec![Field {
name: "age".to_string(),
value: Value::Int(-5),
span: Span::new(0, 10),
}];
let mut collector = ErrorCollector::new();
validate_trait_ranges(&fields, &mut collector);
assert!(collector.has_errors());
}
#[test]
fn test_invalid_age_too_high() {
let fields = vec![Field {
name: "age".to_string(),
value: Value::Int(200),
span: Span::new(0, 10),
}];
let mut collector = ErrorCollector::new();
validate_trait_ranges(&fields, &mut collector);
assert!(collector.has_errors());
}
#[test]
fn test_valid_relationship_bond() {
let relationship = Relationship {
name: "Test".to_string(),
participants: vec![],
fields: vec![Field {
name: "bond".to_string(),
value: Value::Float(0.9),
span: Span::new(0, 10),
}],
span: Span::new(0, 100),
};
let mut collector = ErrorCollector::new();
validate_relationship_bonds(&[relationship], &mut collector);
assert!(!collector.has_errors());
}
#[test]
fn test_invalid_relationship_bond() {
let relationship = Relationship {
name: "Test".to_string(),
participants: vec![],
fields: vec![Field {
name: "bond".to_string(),
value: Value::Float(1.2),
span: Span::new(0, 10),
}],
span: Span::new(0, 100),
};
let mut collector = ErrorCollector::new();
validate_relationship_bonds(&[relationship], &mut collector);
assert!(collector.has_errors());
}
#[test]
fn test_life_arc_valid_transitions() {
let life_arc = LifeArc {
name: "Test".to_string(),
states: vec![
ArcState {
name: "start".to_string(),
transitions: vec![Transition {
to: "end".to_string(),
condition: Expr::Identifier(vec!["ready".to_string()]),
span: Span::new(0, 10),
}],
span: Span::new(0, 50),
},
ArcState {
name: "end".to_string(),
transitions: vec![],
span: Span::new(50, 100),
},
],
span: Span::new(0, 100),
};
let mut collector = ErrorCollector::new();
validate_life_arc_transitions(&life_arc, &mut collector);
assert!(!collector.has_errors());
}
#[test]
fn test_life_arc_invalid_transition() {
    // A transition to a state that is never declared must be rejected.
    let only_state = ArcState {
        name: "start".to_string(),
        transitions: vec![Transition {
            to: "nonexistent".to_string(),
            condition: Expr::Identifier(vec!["ready".to_string()]),
            span: Span::new(0, 10),
        }],
        span: Span::new(0, 50),
    };
    let arc = LifeArc {
        name: "Test".to_string(),
        states: vec![only_state],
        span: Span::new(0, 100),
    };
    let mut errors = ErrorCollector::new();
    validate_life_arc_transitions(&arc, &mut errors);
    assert!(errors.has_errors());
}
#[test]
fn test_behavior_tree_valid_actions() {
    // Both "walk" and "eat" are registered, so the tree validates cleanly.
    let registry: HashSet<String> = ["walk", "eat"].iter().map(|s| s.to_string()).collect();
    let tree = Behavior {
        name: "Test".to_string(),
        root: BehaviorNode::Sequence(vec![
            BehaviorNode::Action("walk".to_string(), vec![]),
            BehaviorNode::Action("eat".to_string(), vec![]),
        ]),
        span: Span::new(0, 100),
    };
    let mut errors = ErrorCollector::new();
    validate_behavior_tree_actions(&tree, &registry, &mut errors);
    assert!(!errors.has_errors());
}
#[test]
fn test_behavior_tree_invalid_action() {
    // The registry deliberately omits "unknown_action", so the lone action
    // node must trigger an error.
    let registry: HashSet<String> = ["walk", "work"].iter().map(|s| s.to_string()).collect();
    let tree = Behavior {
        name: "Test".to_string(),
        root: BehaviorNode::Action("unknown_action".to_string(), vec![]),
        span: Span::new(0, 100),
    };
    let mut errors = ErrorCollector::new();
    validate_behavior_tree_actions(&tree, &registry, &mut errors);
    assert!(errors.has_errors());
}
}

View File

@@ -0,0 +1,226 @@
//! Property tests for semantic validation
use std::collections::HashSet;
use proptest::prelude::*;
use crate::{
resolve::{
validate::*,
ErrorCollector,
},
syntax::ast::*,
};
// ===== Generators =====
/// Strategy producing a `bond` field whose value lies inside the valid [0, 1] range.
fn valid_bond_field() -> impl Strategy<Value = Field> {
    (0.0..=1.0).prop_map(|bond| {
        let span = Span::new(0, 10);
        Field {
            name: "bond".to_string(),
            value: Value::Float(bond),
            span,
        }
    })
}
fn invalid_bond_field() -> impl Strategy<Value = Field> {
prop_oneof![
(-100.0..0.0).prop_map(|f| Field {
name: "bond".to_string(),
value: Value::Float(f),
span: Span::new(0, 10),
}),
(1.0..100.0).prop_map(|f| Field {
name: "bond".to_string(),
value: Value::Float(f),
span: Span::new(0, 10),
}),
]
}
/// Strategy producing an `age` field within the accepted 0..=150 range.
fn valid_age_field() -> impl Strategy<Value = Field> {
    (0i64..=150).prop_map(|years| {
        Field {
            name: "age".to_string(),
            value: Value::Int(years),
            span: Span::new(0, 10),
        }
    })
}
fn invalid_age_field() -> impl Strategy<Value = Field> {
prop_oneof![
(-100i64..-1).prop_map(|age| Field {
name: "age".to_string(),
value: Value::Int(age),
span: Span::new(0, 10),
}),
(151i64..300).prop_map(|age| Field {
name: "age".to_string(),
value: Value::Int(age),
span: Span::new(0, 10),
}),
]
}
// ===== Property Tests =====
proptest! {
    // Any bond value drawn from [0, 1] must pass trait-range validation.
    #[test]
    fn test_valid_bond_always_passes(field in valid_bond_field()) {
        let fields = vec![field];
        let mut collector = ErrorCollector::new();
        validate_trait_ranges(&fields, &mut collector);
        assert!(!collector.has_errors());
    }

    // Any bond value outside [0, 1] must be rejected.
    #[test]
    fn test_invalid_bond_always_fails(field in invalid_bond_field()) {
        let fields = vec![field];
        let mut collector = ErrorCollector::new();
        validate_trait_ranges(&fields, &mut collector);
        assert!(collector.has_errors());
    }

    // Any age in 0..=150 must pass trait-range validation.
    #[test]
    fn test_valid_age_always_passes(field in valid_age_field()) {
        let fields = vec![field];
        let mut collector = ErrorCollector::new();
        validate_trait_ranges(&fields, &mut collector);
        assert!(!collector.has_errors());
    }

    // Any age outside 0..=150 must be rejected.
    #[test]
    fn test_invalid_age_always_fails(field in invalid_age_field()) {
        let fields = vec![field];
        let mut collector = ErrorCollector::new();
        validate_trait_ranges(&fields, &mut collector);
        assert!(collector.has_errors());
    }

    // The inclusive endpoints 0.0 and 1.0 are valid; proptest's endpoint bias
    // means this exercises the boundaries frequently.
    #[test]
    fn test_bond_exact_bounds(f in 0.0f64..=1.0) {
        let field = Field {
            name: "bond".to_string(),
            value: Value::Float(f),
            span: Span::new(0, 10),
        };
        let mut collector = ErrorCollector::new();
        validate_trait_ranges(&[field], &mut collector);
        assert!(!collector.has_errors());
    }

    // A relationship whose bond lies in [0, 1] must validate cleanly.
    #[test]
    fn test_relationship_bond_in_range_passes(
        bond_value in 0.0f64..=1.0
    ) {
        let rel = Relationship {
            name: "Test".to_string(),
            participants: vec![],
            fields: vec![Field {
                name: "bond".to_string(),
                value: Value::Float(bond_value),
                span: Span::new(0, 10),
            }],
            span: Span::new(0, 100),
        };
        let mut collector = ErrorCollector::new();
        validate_relationship_bonds(&[rel], &mut collector);
        assert!(!collector.has_errors());
    }

    // FIX: the upper arm previously used a plain `1.0..100.0`, which can
    // generate exactly 1.0 — a *valid* bond — because proptest is biased
    // toward range endpoints. Filter it out so every generated value is
    // genuinely out of range.
    #[test]
    fn test_relationship_bond_out_of_range_fails(
        bond_value in prop_oneof![
            (-100.0..0.0),
            (1.0..100.0).prop_filter("must be strictly above 1.0", |f| *f > 1.0)
        ]
    ) {
        let rel = Relationship {
            name: "Test".to_string(),
            participants: vec![],
            fields: vec![Field {
                name: "bond".to_string(),
                value: Value::Float(bond_value),
                span: Span::new(0, 10),
            }],
            span: Span::new(0, 100),
        };
        let mut collector = ErrorCollector::new();
        validate_relationship_bonds(&[rel], &mut collector);
        assert!(collector.has_errors());
    }

    // Two distinct states where the first transitions to the second: always valid.
    #[test]
    fn test_life_arc_with_valid_transitions_passes(
        state1_name in "[a-z]{3,10}",
        state2_name in "[a-z]{3,10}"
    ) {
        // Identical names would collapse the two states; skip that case.
        if state1_name == state2_name {
            return Ok(());
        }
        let life_arc = LifeArc {
            name: "Test".to_string(),
            states: vec![
                ArcState {
                    name: state1_name.clone(),
                    transitions: vec![Transition {
                        to: state2_name.clone(),
                        condition: Expr::BoolLit(true),
                        span: Span::new(0, 10),
                    }],
                    span: Span::new(0, 50),
                },
                ArcState {
                    name: state2_name,
                    transitions: vec![],
                    span: Span::new(50, 100),
                },
            ],
            span: Span::new(0, 100),
        };
        let mut collector = ErrorCollector::new();
        validate_life_arc_transitions(&life_arc, &mut collector);
        assert!(!collector.has_errors());
    }

    // An action that is present in the registry always validates.
    #[test]
    fn test_known_action_always_passes(
        tree_name in "[a-z]{3,10}",
        action_name in "[a-z]{3,10}"
    ) {
        let mut registry = HashSet::new();
        registry.insert(action_name.clone());
        let tree = Behavior {
            name: tree_name,
            root: BehaviorNode::Action(action_name, vec![]),
            span: Span::new(0, 100),
        };
        let mut collector = ErrorCollector::new();
        validate_behavior_tree_actions(&tree, &registry, &mut collector);
        assert!(!collector.has_errors());
    }

    #[test]
    fn test_unknown_action_always_fails(
        tree_name in "[a-z]{3,10}",
        action_name in "[a-z]{3,10}"
    ) {
        // Create a registry with some actions, but ensure generated action isn't in it
        // Using uppercase letters ensures it won't match the generated lowercase action
        let mut registry = HashSet::new();
        registry.insert("WALK".to_string());
        registry.insert("WORK".to_string());
        registry.insert("EAT".to_string());
        let tree = Behavior {
            name: tree_name,
            root: BehaviorNode::Action(action_name, vec![]),
            span: Span::new(0, 100),
        };
        let mut collector = ErrorCollector::new();
        validate_behavior_tree_actions(&tree, &registry, &mut collector);
        assert!(collector.has_errors());
    }
}

282
src/syntax/ast.rs Normal file
View File

@@ -0,0 +1,282 @@
/// Source location for error reporting: a byte range into the original
/// source text. `end` is exclusive (logos-style), as produced by the lexer.
///
/// Derives `Copy`, `Eq`, and `Hash` in addition to the originals — the type
/// is two `usize`s, so copying is free and spans can be used as map keys.
/// This is a backward-compatible widening of the derive set.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Span {
    /// Byte offset of the first character covered by the node.
    pub start: usize,
    /// Byte offset one past the last character covered by the node.
    pub end: usize,
}

impl Span {
    /// Create a span covering bytes `start..end`.
    pub fn new(start: usize, end: usize) -> Self {
        Self { start, end }
    }
}
/// Top-level file containing multiple declarations.
#[derive(Debug, Clone, PartialEq)]
pub struct File {
    pub declarations: Vec<Declaration>,
}

/// Any top-level declaration — one variant per declarable entity kind.
#[derive(Debug, Clone, PartialEq)]
pub enum Declaration {
    Use(UseDecl),
    Character(Character),
    Template(Template),
    LifeArc(LifeArc),
    Schedule(Schedule),
    Behavior(Behavior),
    Institution(Institution),
    Relationship(Relationship),
    Location(Location),
    Species(Species),
    Enum(EnumDecl),
}

/// Use statement for importing definitions from another module path.
#[derive(Debug, Clone, PartialEq)]
pub struct UseDecl {
    /// The `::`-separated path segments (e.g. `foo::bar` -> ["foo", "bar"]).
    pub path: Vec<String>,
    pub kind: UseKind,
    pub span: Span,
}

/// Shape of a `use` statement's import list.
#[derive(Debug, Clone, PartialEq)]
pub enum UseKind {
    Single, // use foo::bar
    Grouped(Vec<String>), // use foo::{bar, baz}
    Wildcard, // use foo::*
}
/// Character definition
#[derive(Debug, Clone, PartialEq)]
pub struct Character {
    pub name: String,
    pub fields: Vec<Field>,
    pub template: Option<Vec<String>>, // `from Template1, Template2`
    pub span: Span,
}

/// Template definition (like Character but allows range values)
#[derive(Debug, Clone, PartialEq)]
pub struct Template {
    pub name: String,
    pub fields: Vec<Field>,
    /// Marks the template for strict-mode validation.
    pub strict: bool,
    /// Names of templates composed via `include`.
    pub includes: Vec<String>,
    pub span: Span,
}

/// Field in a structured definition: `name: value`.
#[derive(Debug, Clone, PartialEq)]
pub struct Field {
    pub name: String,
    pub value: Value,
    pub span: Span,
}

/// Field value types
#[derive(Debug, Clone, PartialEq)]
pub enum Value {
    Int(i64),
    Float(f64),
    String(String),
    Bool(bool),
    Range(Box<Value>, Box<Value>), // For templates: 20..40
    Time(Time),
    Duration(Duration),
    Identifier(Vec<String>), // Qualified path reference
    List(Vec<Value>),
    Object(Vec<Field>), // Nested `{ ... }` block of fields
    ProseBlock(ProseBlock),
    Override(Override),
}
/// Time literal (HH:MM or HH:MM:SS)
#[derive(Debug, Clone, PartialEq)]
pub struct Time {
    pub hour: u8,
    pub minute: u8,
    pub second: u8, // 0 when the literal omits the seconds component
}

/// Duration literal (e.g., 2h30m)
#[derive(Debug, Clone, PartialEq)]
pub struct Duration {
    pub hours: u32,
    pub minutes: u32,
    pub seconds: u32,
}

/// Prose block with tag: `---tag ... ---` free-form narrative text.
#[derive(Debug, Clone, PartialEq)]
pub struct ProseBlock {
    /// Tag identifying the block (e.g. "backstory").
    pub tag: String,
    /// Raw text between the markers, trailing whitespace trimmed.
    pub content: String,
    pub span: Span,
}

/// Override specification for template instantiation: `@Template { ... }`.
#[derive(Debug, Clone, PartialEq)]
pub struct Override {
    pub base: Vec<String>, // Template path
    pub overrides: Vec<OverrideOp>,
    pub span: Span,
}

/// A single mutation applied on top of the base template's fields.
#[derive(Debug, Clone, PartialEq)]
pub enum OverrideOp {
    Set(Field), // field: value
    Remove(String), // remove field
    Append(Field), // append field
}
/// Life arc state machine
#[derive(Debug, Clone, PartialEq)]
pub struct LifeArc {
    pub name: String,
    pub states: Vec<ArcState>,
    pub span: Span,
}

/// One state in a life arc, with its outgoing transitions.
#[derive(Debug, Clone, PartialEq)]
pub struct ArcState {
    pub name: String,
    pub transitions: Vec<Transition>,
    pub span: Span,
}

/// `on <condition> -> <state>`: a guarded edge to another ArcState (by name).
#[derive(Debug, Clone, PartialEq)]
pub struct Transition {
    /// Target state name; validated against the arc's declared states.
    pub to: String,
    pub condition: Expr,
    pub span: Span,
}

/// Schedule definition
#[derive(Debug, Clone, PartialEq)]
pub struct Schedule {
    pub name: String,
    pub blocks: Vec<ScheduleBlock>,
    pub span: Span,
}

/// One `start -> end : activity` entry in a schedule.
#[derive(Debug, Clone, PartialEq)]
pub struct ScheduleBlock {
    pub start: Time,
    pub end: Time,
    pub activity: String,
    pub span: Span,
}
/// Behavior tree definition
#[derive(Debug, Clone, PartialEq)]
pub struct Behavior {
    pub name: String,
    pub root: BehaviorNode,
    pub span: Span,
}

/// A node in a behavior tree.
#[derive(Debug, Clone, PartialEq)]
pub enum BehaviorNode {
    Selector(Vec<BehaviorNode>), // ? operator
    Sequence(Vec<BehaviorNode>), // > operator (context-dependent)
    Condition(Expr),
    Action(String, Vec<Field>), // Action name + parameters
    Decorator(String, Box<BehaviorNode>),
    SubTree(Vec<String>), // Reference to another behavior
}

/// Institution definition
#[derive(Debug, Clone, PartialEq)]
pub struct Institution {
    pub name: String,
    pub fields: Vec<Field>,
    pub span: Span,
}

/// Relationship definition
#[derive(Debug, Clone, PartialEq)]
pub struct Relationship {
    pub name: String,
    pub participants: Vec<Participant>,
    pub fields: Vec<Field>,
    pub span: Span,
}

/// One party in a relationship, optionally with a role and per-side fields.
#[derive(Debug, Clone, PartialEq)]
pub struct Participant {
    pub role: Option<String>, // "as parent"
    pub name: Vec<String>, // Qualified path
    /// Fields from this participant's `self { ... }` block, if present.
    pub self_block: Option<Vec<Field>>,
    /// Fields from this participant's `other { ... }` block, if present.
    pub other_block: Option<Vec<Field>>,
    pub span: Span,
}

/// Location definition
#[derive(Debug, Clone, PartialEq)]
pub struct Location {
    pub name: String,
    pub fields: Vec<Field>,
    pub span: Span,
}

/// Species definition
#[derive(Debug, Clone, PartialEq)]
pub struct Species {
    pub name: String,
    pub fields: Vec<Field>,
    pub span: Span,
}

/// Enum definition
#[derive(Debug, Clone, PartialEq)]
pub struct EnumDecl {
    pub name: String,
    pub variants: Vec<String>,
    pub span: Span,
}
/// Expression AST for conditions and queries
#[derive(Debug, Clone, PartialEq)]
pub enum Expr {
    IntLit(i64),
    FloatLit(f64),
    StringLit(String),
    BoolLit(bool),
    Identifier(Vec<String>),
    FieldAccess(Box<Expr>, String),
    Comparison(Box<Expr>, CompOp, Box<Expr>),
    Logical(Box<Expr>, LogicalOp, Box<Expr>),
    Unary(UnaryOp, Box<Expr>),
    /// `forall`/`exists` <var> in <collection>: <predicate>.
    /// Fields: quantifier kind, bound variable name, collection expression,
    /// predicate expression.
    /// NOTE(review): no production in parser.lalrpop builds this variant —
    /// confirm whether quantifier syntax is still planned.
    Quantifier(QuantifierKind, String, Box<Expr>, Box<Expr>),
}

/// Binary comparison operators.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum CompOp {
    Eq, // ==
    Ne, // !=
    Lt, // <
    Le, // <=
    Gt, // >
    Ge, // >=
}

/// Binary logical connectives (`and`, `or`).
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum LogicalOp {
    And,
    Or,
}

/// Unary operators (`not`, numeric negation).
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum UnaryOp {
    Not,
    Neg,
}

/// Which quantifier introduced a `Quantifier` expression.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum QuantifierKind {
    ForAll,
    Exists,
}

424
src/syntax/lexer.rs Normal file
View File

@@ -0,0 +1,424 @@
use std::fmt;
use logos::Logos;
/// Token types for the Storybook language.
///
/// Produced by the logos-derived DFA, except `ProseBlock` and `Error`,
/// which are injected by the wrapper `Lexer` below.
#[derive(Logos, Debug, Clone, PartialEq)]
#[logos(skip r"[ \t\n\f]+")] // Skip whitespace
#[logos(skip r"//[^\n]*")] // Skip line comments
#[logos(skip r"/\*([^*]|\*[^/])*\*/")] // Skip block comments
pub enum Token {
    // Keywords
    #[token("use")]
    Use,
    #[token("character")]
    Character,
    #[token("template")]
    Template,
    #[token("life_arc")]
    LifeArc,
    #[token("schedule")]
    Schedule,
    #[token("behavior")]
    Behavior,
    #[token("institution")]
    Institution,
    #[token("relationship")]
    Relationship,
    #[token("location")]
    Location,
    #[token("species")]
    Species,
    #[token("enum")]
    Enum,
    #[token("state")]
    State,
    #[token("on")]
    On,
    #[token("as")]
    As,
    #[token("self")]
    SelfKw,
    #[token("other")]
    Other,
    #[token("remove")]
    Remove,
    #[token("append")]
    Append,
    #[token("forall")]
    ForAll,
    #[token("exists")]
    Exists,
    #[token("in")]
    In,
    #[token("where")]
    Where,
    #[token("and")]
    And,
    #[token("or")]
    Or,
    #[token("not")]
    Not,
    #[token("strict")]
    Strict,
    #[token("include")]
    Include,
    #[token("from")]
    From,
    #[token("is")]
    Is,
    #[token("true")]
    True,
    #[token("false")]
    False,
    // Identifiers and literals
    #[regex(r"[a-zA-Z_][a-zA-Z0-9_]*", |lex| lex.slice().to_string())]
    Ident(String),
    #[regex(r"-?[0-9]+", |lex| lex.slice().parse::<i64>().ok())]
    IntLit(i64),
    #[regex(r"-?[0-9]+\.[0-9]+", |lex| lex.slice().parse::<f64>().ok())]
    FloatLit(f64),
    // NOTE: only the surrounding quotes are stripped; escape sequences are
    // kept verbatim in the payload.
    #[regex(r#""([^"\\]|\\.)*""#, |lex| {
        let s = lex.slice();
        s[1..s.len()-1].to_string()
    })]
    StringLit(String),
    // Time literal: HH:MM or HH:MM:SS (kept as raw text; parsed in the grammar)
    #[regex(r"[0-9]{2}:[0-9]{2}(:[0-9]{2})?", |lex| lex.slice().to_string())]
    TimeLit(String),
    // Duration literal: e.g., 2h30m, 45m, 1h (kept as raw text; parsed in the grammar)
    #[regex(r"[0-9]+[hms]([0-9]+[hms])*", |lex| lex.slice().to_string())]
    DurationLit(String),
    // Punctuation
    #[token("{")]
    LBrace,
    #[token("}")]
    RBrace,
    #[token("(")]
    LParen,
    #[token(")")]
    RParen,
    #[token("[")]
    LBracket,
    #[token("]")]
    RBracket,
    #[token(":")]
    Colon,
    #[token("::")]
    ColonColon,
    #[token(";")]
    Semicolon,
    #[token(",")]
    Comma,
    #[token(".")]
    Dot,
    #[token("..")]
    DotDot,
    #[token("*")]
    Star,
    #[token("?")]
    Question,
    #[token("@")]
    At,
    // Operators
    #[token(">")]
    Gt,
    #[token(">=")]
    Ge,
    #[token("<")]
    Lt,
    #[token("<=")]
    Le,
    #[token("->")]
    Arrow,
    // Special markers
    #[token("---")]
    ProseMarker,
    // Prose block (handled specially): never produced by the logos DFA (it
    // has no pattern); constructed by the wrapper `Lexer` when it scans a
    // `---tag ... ---` section by hand.
    ProseBlock(super::ast::ProseBlock),
    // Error token: emitted by the wrapper `Lexer` wherever logos reports a
    // lexing error.
    Error,
}
impl fmt::Display for Token {
    /// Human-readable rendering used in parser error messages; tokens with a
    /// payload show it, everything else falls back to the Debug name.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Token::Ident(name) => write!(f, "identifier '{}'", name),
            Token::IntLit(value) => write!(f, "integer {}", value),
            Token::FloatLit(value) => write!(f, "float {}", value),
            Token::StringLit(text) => write!(f, "string \"{}\"", text),
            Token::TimeLit(raw) => write!(f, "time {}", raw),
            Token::DurationLit(raw) => write!(f, "duration {}", raw),
            Token::ProseBlock(block) => write!(f, "prose block ---{}", block.tag),
            other => write!(f, "{:?}", other),
        }
    }
}
/// Lexer state machine for handling prose blocks.
#[derive(Debug, Clone)]
enum LexerState {
    /// Scanning ordinary tokens via the logos-generated lexer.
    Normal,
    ProseTag, // After seeing first ---
    ProseContent(String, usize), // Tag + content start position
}
/// Wrapper lexer that handles two-mode scanning
pub struct Lexer<'a> {
source: &'a str,
position: usize,
state: LexerState,
normal_lexer: Option<logos::Lexer<'a, Token>>,
}
impl<'a> Lexer<'a> {
pub fn new(source: &'a str) -> Self {
Self {
source,
position: 0,
state: LexerState::Normal,
normal_lexer: Some(Token::lexer(source)),
}
}
fn scan_prose_tag(&mut self) -> Option<(usize, Token, usize)> {
let _start = self.position;
self.position += 3; // Skip ---
// Skip whitespace
while self.position < self.source.len() &&
self.source[self.position..].starts_with(|c: char| c.is_whitespace())
{
self.position += 1;
}
// Read tag until whitespace or newline
let tag_start = self.position;
while self.position < self.source.len() {
let ch = self.source[self.position..].chars().next().unwrap();
if ch.is_whitespace() {
break;
}
self.position += ch.len_utf8();
}
let tag = self.source[tag_start..self.position].to_string();
// Skip to end of line
while self.position < self.source.len() {
let ch = self.source[self.position..].chars().next().unwrap();
if ch == '\n' {
self.position += 1;
break;
}
self.position += ch.len_utf8();
}
self.state = LexerState::ProseContent(tag, self.position);
self.next()
}
fn scan_prose_content(
&mut self,
tag: String,
content_start: usize,
) -> Option<(usize, Token, usize)> {
let remaining = &self.source[content_start..];
let mut byte_offset = 0;
// Scan until we find closing ---
while byte_offset < remaining.len() {
if remaining[byte_offset..].starts_with("---") {
// Check if it's at start of line (or after whitespace)
let is_line_start = byte_offset == 0 ||
remaining[..byte_offset]
.chars()
.rev()
.take_while(|&c| c != '\n')
.all(|c| c.is_whitespace());
if is_line_start {
// Found closing marker
let content_end = content_start + byte_offset;
let content = self.source[content_start..content_end]
.trim_end()
.to_string();
let start = content_start.saturating_sub(tag.len() + 4); // Include opening ---tag
self.position = content_end + 3; // Skip closing ---
self.state = LexerState::Normal;
self.normal_lexer = Some(Token::lexer(&self.source[self.position..]));
let prose_block = super::ast::ProseBlock {
tag,
content,
span: super::ast::Span::new(start, self.position),
};
return Some((start, Token::ProseBlock(prose_block), self.position));
}
}
// Advance by one UTF-8 character to avoid char boundary issues
if let Some(ch) = remaining[byte_offset..].chars().next() {
byte_offset += ch.len_utf8();
} else {
break;
}
}
// EOF reached without closing marker - treat as error
None
}
}
impl<'a> Iterator for Lexer<'a> {
type Item = (usize, Token, usize);
fn next(&mut self) -> Option<Self::Item> {
match &self.state {
| LexerState::Normal => {
let lexer = self.normal_lexer.as_mut()?;
let token = lexer.next()?;
let span = lexer.span();
match token {
| Ok(Token::ProseMarker) => {
// Switch to prose mode
let marker_pos = span.start;
self.position = marker_pos;
self.state = LexerState::ProseTag;
self.normal_lexer = None;
self.scan_prose_tag()
},
| Ok(tok) => {
self.position = span.end;
Some((span.start, tok, span.end))
},
| Err(_) => {
self.position = span.end;
Some((span.start, Token::Error, span.end))
},
}
},
| LexerState::ProseTag => {
// Should not happen - scan_prose_tag transitions state
None
},
| LexerState::ProseContent(tag, content_start) => {
let tag = tag.clone();
let content_start = *content_start;
self.scan_prose_content(tag, content_start)
},
}
}
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_basic_tokens() {
        // Positions are dropped; only token kinds/payloads are compared.
        let input = "character Martha { age: 34 }";
        let lexer = Lexer::new(input);
        let tokens: Vec<Token> = lexer.map(|(_, tok, _)| tok).collect();
        assert_eq!(
            tokens,
            vec![
                Token::Character,
                Token::Ident("Martha".to_string()),
                Token::LBrace,
                Token::Ident("age".to_string()),
                Token::Colon,
                Token::IntLit(34),
                Token::RBrace,
            ]
        );
    }

    #[test]
    fn test_prose_block() {
        // A whole prose section collapses into a single ProseBlock token.
        let input = r#"
---backstory
Martha grew up in a small town.
She loved baking from a young age.
---
"#;
        let lexer = Lexer::new(input.trim());
        let tokens: Vec<Token> = lexer.map(|(_, tok, _)| tok).collect();
        assert_eq!(tokens.len(), 1);
        match &tokens[0] {
            | Token::ProseBlock(pb) => {
                assert_eq!(pb.tag, "backstory");
                assert!(pb.content.contains("Martha grew up"));
                assert!(pb.content.contains("young age"));
            },
            | _ => panic!("Expected ProseBlock, got {:?}", tokens[0]),
        }
    }

    #[test]
    fn test_prose_with_dashes_in_content() {
        // Hyphenated words must not be mistaken for the closing --- marker,
        // which only counts at the start of a line.
        let input = r#"
---description
She was well-known for her kind-hearted nature.
The bakery had a no-nonsense policy.
---
"#;
        let lexer = Lexer::new(input.trim());
        let tokens: Vec<Token> = lexer.map(|(_, tok, _)| tok).collect();
        assert_eq!(tokens.len(), 1);
        match &tokens[0] {
            | Token::ProseBlock(pb) => {
                assert_eq!(pb.tag, "description");
                assert!(pb.content.contains("well-known"));
                assert!(pb.content.contains("kind-hearted"));
                assert!(pb.content.contains("no-nonsense"));
            },
            | _ => panic!("Expected ProseBlock"),
        }
    }

    #[test]
    fn test_time_duration_literals() {
        // Time/duration regexes win over IntLit via logos longest-match.
        let input = "08:30 14:45:00 2h30m 45m";
        let lexer = Lexer::new(input);
        let tokens: Vec<Token> = lexer.map(|(_, tok, _)| tok).collect();
        assert_eq!(
            tokens,
            vec![
                Token::TimeLit("08:30".to_string()),
                Token::TimeLit("14:45:00".to_string()),
                Token::DurationLit("2h30m".to_string()),
                Token::DurationLit("45m".to_string()),
            ]
        );
    }

    #[test]
    fn test_range_syntax() {
        // "20..40" must lex as Int, DotDot, Int — not as two floats.
        let input = "20..40";
        let lexer = Lexer::new(input);
        let tokens: Vec<Token> = lexer.map(|(_, tok, _)| tok).collect();
        assert_eq!(
            tokens,
            vec![Token::IntLit(20), Token::DotDot, Token::IntLit(40),]
        );
    }
}

56
src/syntax/mod.rs Normal file
View File

@@ -0,0 +1,56 @@
#![allow(unused_assignments)] // False positives in error enum fields used by thiserror
pub mod ast;
pub mod lexer;
// Parser is generated by LALRPOP
#[allow(clippy::all)]
#[allow(unused)]
mod parser;
pub use parser::FileParser;
#[cfg(test)]
mod prop_tests;
use miette::Diagnostic;
use thiserror::Error;
/// Errors produced while lexing/parsing a `.sb` file; rendered as rich
/// diagnostics via the miette `Diagnostic` derive.
#[derive(Error, Debug, Diagnostic)]
pub enum ParseError {
    #[error("Unexpected token: {token}")]
    #[diagnostic(help("Check for syntax errors like missing braces, colons, or semicolons. Common issues: forgetting ':' after field names, missing '}}' to close a block, or using reserved keywords as names."))]
    UnexpectedToken {
        // NOTE(review): #[allow(unused_assignments)] has no effect on a
        // struct field (the lint targets assignments); likely copied from the
        // module-level allow — confirm and drop.
        #[allow(dead_code)]
        #[allow(unused_assignments)]
        token: String,
        #[label("unexpected token here")]
        span: miette::SourceSpan,
    },
    #[error("Unexpected end of file")]
    #[diagnostic(help("The file ended before a declaration was complete. Check that all blocks are properly closed with '}}', all strings are closed with quotes, and all prose blocks end with '---'."))]
    UnexpectedEof {
        #[label("file ended here, but expected more input")]
        span: miette::SourceSpan,
    },
    #[error("Invalid token")]
    #[diagnostic(help("This character or sequence is not valid in Storybook syntax. Common issues: special characters in names (use letters, numbers, and underscores only), unescaped quotes in strings, or invalid time formats."))]
    InvalidToken {
        #[label("invalid token here")]
        span: miette::SourceSpan,
    },
    #[error("Unclosed prose block starting with ---{tag}")]
    #[diagnostic(help("Prose blocks must be closed with '---' on its own line. Make sure the closing '---' is at the start of a line with no other text before it."))]
    UnclosedProseBlock {
        // NOTE(review): same no-op allow as above — confirm and drop.
        #[allow(dead_code)]
        #[allow(unused_assignments)]
        tag: String,
        #[label("prose block starts here but never closes")]
        span: miette::SourceSpan,
    },
}

/// Convenience alias used by the parser entry points.
pub type ParseResult<T> = Result<T, ParseError>;

520
src/syntax/parser.lalrpop Normal file
View File

@@ -0,0 +1,520 @@
use crate::syntax::ast::*;
use crate::syntax::lexer::Token;
grammar;
// ===== Top-level =====
// A file is any number of declarations, in source order.
pub File: File = {
    <declarations:Declaration*> => File { declarations }
};

// One alternative per declarable entity kind; mirrors ast::Declaration.
Declaration: Declaration = {
    <u:UseDecl> => Declaration::Use(u),
    <c:Character> => Declaration::Character(c),
    <t:Template> => Declaration::Template(t),
    <l:LifeArc> => Declaration::LifeArc(l),
    <s:Schedule> => Declaration::Schedule(s),
    <b:Behavior> => Declaration::Behavior(b),
    <i:Institution> => Declaration::Institution(i),
    <r:Relationship> => Declaration::Relationship(r),
    <loc:Location> => Declaration::Location(loc),
    <sp:Species> => Declaration::Species(sp),
    <e:EnumDecl> => Declaration::Enum(e),
};
// ===== Use declarations =====
// Three `use` shapes: single path, grouped `{a, b}`, and wildcard `*`.
UseDecl: UseDecl = {
    "use" <path:Path> ";" => UseDecl {
        path,
        kind: UseKind::Single,
        span: Span::new(0, 0), // TODO: track actual spans (use @L/@R markers)
    },
    "use" <base:PathSegments> "::" "{" <items:Comma<Ident>> "}" ";" => UseDecl {
        path: base,
        kind: UseKind::Grouped(items),
        span: Span::new(0, 0),
    },
    "use" <path:PathSegments> "::" "*" ";" => UseDecl {
        path,
        kind: UseKind::Wildcard,
        span: Span::new(0, 0),
    },
};

Path: Vec<String> = {
    <PathSegments>
};

// Left-recursive list of `::`-separated identifiers; at least one segment.
PathSegments: Vec<String> = {
    <Ident> => vec![<>],
    <mut v:PathSegments> "::" <i:Ident> => {
        v.push(i);
        v
    }
};
// ===== Character =====
// A character instance, optionally derived from templates via `from A, B`.
// The span now covers the whole declaration using LALRPOP's @L/@R byte-offset
// markers (replacing the former Span::new(0, 0) placeholder).
Character: Character = {
    <l:@L> "character" <name:Ident> <template:TemplateClause?> "{" <fields:Field*> "}" <r:@R> => Character {
        name,
        fields,
        template,
        span: Span::new(l, r),
    }
};

// `from Template1, Template2, ...` — at least one template name.
TemplateClause: Vec<String> = {
    "from" <t:Ident> <rest:("," <Ident>)*> => {
        let mut templates = vec![t];
        templates.extend(rest);
        templates
    }
};
// ===== Template =====
// Reusable field bundle; `strict` marks it for strict-mode validation and
// `include` composes other templates. The span now uses @L/@R byte offsets
// (replacing the former Span::new(0, 0) placeholder).
Template: Template = {
    <l:@L> "template" <name:Ident> <strict:"strict"?> "{" <includes:Include*> <fields:Field*> "}" <r:@R> => Template {
        name,
        fields,
        strict: strict.is_some(),
        includes,
        span: Span::new(l, r),
    }
};

Include: String = {
    "include" <name:Ident> => name
};
// ===== Fields =====
// `name: value` — the span now covers the whole field via @L/@R byte offsets
// (replacing the former Span::new(0, 0) placeholder), so field-level
// validation errors can point at the exact source location.
Field: Field = {
    <l:@L> <name:Ident> ":" <value:Value> <r:@R> => Field {
        name,
        value,
        span: Span::new(l, r),
    }
};
// Every value shape a field can hold; ranges are template-only and are
// checked semantically, not syntactically.
Value: Value = {
    <IntLit> => Value::Int(<>),
    <FloatLit> => Value::Float(<>),
    <StringLit> => Value::String(<>),
    <BoolLit> => Value::Bool(<>),
    <lo:IntLit> ".." <hi:IntLit> => Value::Range(
        Box::new(Value::Int(lo)),
        Box::new(Value::Int(hi))
    ),
    <lo:FloatLit> ".." <hi:FloatLit> => Value::Range(
        Box::new(Value::Float(lo)),
        Box::new(Value::Float(hi))
    ),
    <t:Time> => Value::Time(t),
    <d:Duration> => Value::Duration(d),
    <p:Path> => Value::Identifier(p),
    <ProseBlock> => Value::ProseBlock(<>),
    "[" <values:Comma<Value>> "]" => Value::List(values),
    "{" <fields:Field*> "}" => Value::Object(fields),
    <Override> => Value::Override(<>),
};

BoolLit: bool = {
    "true" => true,
    "false" => false,
};

// The TimeLit lexer regex guarantees at least HH:MM, so parts[0] and
// parts[1] always exist; only the seconds component is optional.
Time: Time = {
    <s:TimeLit> => {
        let parts: Vec<&str> = s.split(':').collect();
        let hour = parts[0].parse().unwrap_or(0);
        let minute = parts[1].parse().unwrap_or(0);
        let second = if parts.len() > 2 {
            parts[2].parse().unwrap_or(0)
        } else {
            0
        };
        Time { hour, minute, second }
    }
};

// Accumulate digit runs and assign them on each unit suffix (h/m/s).
// NOTE(review): a repeated unit (e.g. "1h2h") silently keeps the last value.
Duration: Duration = {
    <s:DurationLit> => {
        let mut hours = 0;
        let mut minutes = 0;
        let mut seconds = 0;
        let mut num = String::new();
        for ch in s.chars() {
            if ch.is_ascii_digit() {
                num.push(ch);
            } else {
                let val: u32 = num.parse().unwrap_or(0);
                match ch {
                    'h' => hours = val,
                    'm' => minutes = val,
                    's' => seconds = val,
                    _ => {}
                }
                num.clear();
            }
        }
        Duration { hours, minutes, seconds }
    }
};

// The prose token already carries an accurate span from the lexer.
ProseBlock: ProseBlock = {
    ProseBlockToken
};

// `@Template { ... }` — instantiate a template with field overrides.
// TODO: use @L/@R here instead of the Span::new(0, 0) placeholder.
Override: Override = {
    "@" <base:Path> "{" <overrides:OverrideOp*> "}" => Override {
        base,
        overrides,
        span: Span::new(0, 0),
    }
};

OverrideOp: OverrideOp = {
    "remove" <name:Ident> => OverrideOp::Remove(name),
    "append" <f:Field> => OverrideOp::Append(f),
    <f:Field> => OverrideOp::Set(f),
};
// ===== Life Arc =====
// State machine of life stages. All spans now use @L/@R byte offsets
// (replacing the former Span::new(0, 0) placeholders).
LifeArc: LifeArc = {
    <l:@L> "life_arc" <name:Ident> "{" <states:ArcState*> "}" <r:@R> => LifeArc {
        name,
        states,
        span: Span::new(l, r),
    }
};

ArcState: ArcState = {
    <l:@L> "state" <name:Ident> "{" <transitions:Transition*> "}" <r:@R> => ArcState {
        name,
        transitions,
        span: Span::new(l, r),
    }
};

// `on <condition> -> <target-state>`
Transition: Transition = {
    <l:@L> "on" <cond:Expr> "->" <to:Ident> <r:@R> => Transition {
        to,
        condition: cond,
        span: Span::new(l, r),
    }
};
// ===== Schedule =====
// Daily schedule: a list of `HH:MM -> HH:MM : activity` blocks. Spans now
// use @L/@R byte offsets (replacing Span::new(0, 0) placeholders), which the
// schedule-overlap validator can point at directly.
Schedule: Schedule = {
    <l:@L> "schedule" <name:Ident> "{" <blocks:ScheduleBlock*> "}" <r:@R> => Schedule {
        name,
        blocks,
        span: Span::new(l, r),
    }
};

ScheduleBlock: ScheduleBlock = {
    <l:@L> <start:Time> "->" <end:Time> ":" <activity:Ident> <r:@R> => ScheduleBlock {
        start,
        end,
        activity,
        span: Span::new(l, r),
    }
};
// ===== Behavior Trees =====
// TODO: use @L/@R for the span instead of the Span::new(0, 0) placeholder.
Behavior: Behavior = {
    "behavior" <name:Ident> "{" <root:BehaviorNode> "}" => Behavior {
        name,
        root,
        span: Span::new(0, 0),
    }
};

BehaviorNode: BehaviorNode = {
    <SelectorNode>,
    <SequenceNode>,
    <ActionNode>,
    <SubTreeNode>,
};

// `? { ... }` — selector node (the `?` operator).
SelectorNode: BehaviorNode = {
    "?" "{" <nodes:BehaviorNode+> "}" => BehaviorNode::Selector(nodes),
};

// `> { ... }` — sequence node (the `>` operator; disambiguated from the
// comparison operator by the brace context).
SequenceNode: BehaviorNode = {
    ">" "{" <nodes:BehaviorNode+> "}" => BehaviorNode::Sequence(nodes),
};

// An action with an optional `(param: value, ...)` list.
ActionNode: BehaviorNode = {
    <name:Ident> "(" <params:Comma<Field>> ")" => BehaviorNode::Action(name, params),
    <name:Ident> => BehaviorNode::Action(name, vec![]),
};

// `@path::to::behavior` — reference to another behavior tree.
SubTreeNode: BehaviorNode = {
    "@" <path:Path> => BehaviorNode::SubTree(path),
};
// ===== Institution =====
// TODO: the Span::new(0, 0) placeholders below should be replaced with
// @L/@R markers, as done for Character/Template/Field.
Institution: Institution = {
    "institution" <name:Ident> "{" <fields:Field*> "}" => Institution {
        name,
        fields,
        span: Span::new(0, 0),
    }
};

// ===== Relationship =====
// One or more participants followed by shared relationship fields.
Relationship: Relationship = {
    "relationship" <name:Ident> "{" <participants:Participant+> <fields:Field*> "}" => Relationship {
        name,
        participants,
        fields,
        span: Span::new(0, 0),
    }
};

// A participant path with optional role (`as parent`) and optional
// per-side `self { ... }` / `other { ... }` field blocks.
Participant: Participant = {
    <name:Path> <role:("as" <Ident>)?> <self_block:SelfBlock?> <other_block:OtherBlock?> => Participant {
        role,
        name,
        self_block,
        other_block,
        span: Span::new(0, 0),
    }
};

SelfBlock: Vec<Field> = {
    "self" "{" <fields:Field*> "}" => fields
};

OtherBlock: Vec<Field> = {
    "other" "{" <fields:Field*> "}" => fields
};

// ===== Location =====
Location: Location = {
    "location" <name:Ident> "{" <fields:Field*> "}" => Location {
        name,
        fields,
        span: Span::new(0, 0),
    }
};

// ===== Species =====
Species: Species = {
    "species" <name:Ident> "{" <fields:Field*> "}" => Species {
        name,
        fields,
        span: Span::new(0, 0),
    }
};

// ===== Enum =====
EnumDecl: EnumDecl = {
    "enum" <name:Ident> "{" <variants:Comma<Ident>> "}" => EnumDecl {
        name,
        variants,
        span: Span::new(0, 0),
    }
};
// ===== Expressions =====
// Expression grammar with proper precedence:
// or > and > not > field_access > comparison > term
// NOTE(review): the `forall`/`exists`/`in`/`where` tokens and the
// Expr::Quantifier AST variant exist, but no production here builds
// quantifier expressions — confirm whether that syntax is still planned.
Expr: Expr = {
    <OrExpr>,
};

// Logical OR (lowest precedence); left-associative via left recursion.
OrExpr: Expr = {
    <left:OrExpr> "or" <right:AndExpr> => {
        Expr::Logical(
            Box::new(left),
            LogicalOp::Or,
            Box::new(right)
        )
    },
    <AndExpr>,
};

// Logical AND; left-associative via left recursion.
AndExpr: Expr = {
    <left:AndExpr> "and" <right:NotExpr> => {
        Expr::Logical(
            Box::new(left),
            LogicalOp::And,
            Box::new(right)
        )
    },
    <NotExpr>,
};

// Unary NOT; right-associative (`not not x` nests).
NotExpr: Expr = {
    "not" <expr:NotExpr> => {
        Expr::Unary(
            UnaryOp::Not,
            Box::new(expr)
        )
    },
    <ComparisonExpr>,
};

// Comparison expressions (non-associative: a single comparison per level).
ComparisonExpr: Expr = {
    // Equality: `is` maps to CompOp::Eq
    <left:FieldAccessExpr> "is" <right:FieldAccessExpr> => {
        Expr::Comparison(
            Box::new(left),
            CompOp::Eq,
            Box::new(right)
        )
    },
    // Ordering: field access or path > literal/identifier, etc.
    <left:FieldAccessExpr> <op:InequalityOp> <right:FieldAccessExpr> => {
        Expr::Comparison(
            Box::new(left),
            op,
            Box::new(right)
        )
    },
    // Just a field access expression
    <FieldAccessExpr>,
};

// Field access with dot notation (binds tightest)
FieldAccessExpr: Expr = {
    <base:FieldAccessExpr> "." <field:Ident> => {
        Expr::FieldAccess(
            Box::new(base),
            field
        )
    },
    <PrimaryExpr>,
};

// Primary expressions (atoms). `self`/`other` are keywords, so they are
// re-introduced here as one-segment identifiers.
PrimaryExpr: Expr = {
    "self" => Expr::Identifier(vec!["self".to_string()]),
    "other" => Expr::Identifier(vec!["other".to_string()]),
    <Literal>,
    <Path> => Expr::Identifier(<>),
};

InequalityOp: CompOp = {
    ">" => CompOp::Gt,
    ">=" => CompOp::Ge,
    "<" => CompOp::Lt,
    "<=" => CompOp::Le,
};

Literal: Expr = {
    <IntLit> => Expr::IntLit(<>),
    <FloatLit> => Expr::FloatLit(<>),
    <StringLit> => Expr::StringLit(<>),
    <BoolLit> => Expr::BoolLit(<>),
};

// ===== Helpers =====
// Comma-separated list of T with an optional trailing comma.
Comma<T>: Vec<T> = {
    <v:(<T> ",")*> <e:T?> => match e {
        None => v,
        Some(e) => {
            let mut v = v;
            v.push(e);
            v
        }
    }
};
// ===== Token conversion =====
// Maps grammar terminal names to lexer tokens. `Location = usize` is the
// byte offset supplied by the Lexer iterator (and consumed by @L/@R).
extern {
    type Location = usize;
    type Error = crate::syntax::ParseError;

    enum Token {
        // Keywords
        "use" => Token::Use,
        "character" => Token::Character,
        "template" => Token::Template,
        "life_arc" => Token::LifeArc,
        "schedule" => Token::Schedule,
        "behavior" => Token::Behavior,
        "institution" => Token::Institution,
        "relationship" => Token::Relationship,
        "location" => Token::Location,
        "species" => Token::Species,
        "enum" => Token::Enum,
        "state" => Token::State,
        "on" => Token::On,
        "as" => Token::As,
        "self" => Token::SelfKw,
        "other" => Token::Other,
        "remove" => Token::Remove,
        "append" => Token::Append,
        "forall" => Token::ForAll,
        "exists" => Token::Exists,
        "in" => Token::In,
        "where" => Token::Where,
        "and" => Token::And,
        "or" => Token::Or,
        "not" => Token::Not,
        "strict" => Token::Strict,
        "include" => Token::Include,
        "from" => Token::From,
        "is" => Token::Is,
        "true" => Token::True,
        "false" => Token::False,
        // Literals (payload-carrying terminals)
        Ident => Token::Ident(<String>),
        IntLit => Token::IntLit(<i64>),
        FloatLit => Token::FloatLit(<f64>),
        StringLit => Token::StringLit(<String>),
        TimeLit => Token::TimeLit(<String>),
        DurationLit => Token::DurationLit(<String>),
        ProseBlockToken => Token::ProseBlock(<ProseBlock>),
        // Punctuation
        "{" => Token::LBrace,
        "}" => Token::RBrace,
        "(" => Token::LParen,
        ")" => Token::RParen,
        "[" => Token::LBracket,
        "]" => Token::RBracket,
        ":" => Token::Colon,
        "::" => Token::ColonColon,
        ";" => Token::Semicolon,
        "," => Token::Comma,
        "." => Token::Dot,
        ".." => Token::DotDot,
        "*" => Token::Star,
        "?" => Token::Question,
        "@" => Token::At,
        // Operators
        ">" => Token::Gt,
        ">=" => Token::Ge,
        "<" => Token::Lt,
        "<=" => Token::Le,
        "->" => Token::Arrow,
    }
}

10846
src/syntax/parser.rs Normal file

File diff suppressed because it is too large Load Diff

1441
src/syntax/prop_tests.rs Normal file

File diff suppressed because it is too large Load Diff

228
src/types.rs Normal file
View File

@@ -0,0 +1,228 @@
//! Public types for resolved Storybook entities
//!
//! These types represent fully resolved, validated entities after the
//! resolution pipeline completes. Unlike the AST types which represent
//! raw parsed syntax, these types:
//! - Have all cross-references resolved
//! - Have all overrides applied
//! - Have passed semantic validation
//! - Are ready for consumption by the game engine
use std::collections::HashMap;
use crate::syntax::ast::{
BehaviorNode,
Participant,
ProseBlock,
Span,
Time,
Transition,
Value,
};
/// A fully resolved Storybook project
///
/// The root output of the resolution pipeline; holds every top-level
/// declaration in source order. Derives `PartialEq` so whole files can be
/// compared (every contained `Resolved*` type already derives it).
#[derive(Debug, Clone, PartialEq)]
pub struct ResolvedFile {
    /// All resolved top-level declarations, in source order.
    pub declarations: Vec<ResolvedDeclaration>,
}

/// A resolved top-level declaration
///
/// One variant per declaration kind the DSL supports.
#[derive(Debug, Clone, PartialEq)]
pub enum ResolvedDeclaration {
    Character(ResolvedCharacter),
    Template(ResolvedTemplate),
    LifeArc(ResolvedLifeArc),
    Schedule(ResolvedSchedule),
    Behavior(ResolvedBehavior),
    Institution(ResolvedInstitution),
    Relationship(ResolvedRelationship),
    Location(ResolvedLocation),
    Species(ResolvedSpecies),
    Enum(ResolvedEnum),
}
/// A character with all templates applied and references resolved
#[derive(Debug, Clone, PartialEq)]
pub struct ResolvedCharacter {
    /// Character name; lookup key for `ResolvedFile::find_character`.
    pub name: String,
    /// Final field values after template composition and overrides.
    pub fields: HashMap<String, Value>,
    /// Named prose blocks attached to the character.
    pub prose_blocks: HashMap<String, ProseBlock>,
    /// Source location of the original declaration.
    pub span: Span,
}
/// A template definition (before instantiation)
#[derive(Debug, Clone, PartialEq)]
pub struct ResolvedTemplate {
    /// Template name.
    pub name: String,
    /// Fields declared by the template.
    pub fields: HashMap<String, Value>,
    /// Source location of the declaration.
    pub span: Span,
}
/// A life arc with validated state transitions
#[derive(Debug, Clone, PartialEq)]
pub struct ResolvedLifeArc {
    /// Life-arc name.
    pub name: String,
    /// States in declaration order.
    pub states: Vec<ResolvedArcState>,
    /// Source location of the declaration.
    pub span: Span,
}

/// A single state within a life arc.
#[derive(Debug, Clone, PartialEq)]
pub struct ResolvedArcState {
    /// State name.
    pub name: String,
    /// Transitions out of this state.
    pub transitions: Vec<Transition>,
    /// Source location of the state block.
    pub span: Span,
}
/// A schedule with validated non-overlapping blocks
#[derive(Debug, Clone, PartialEq)]
pub struct ResolvedSchedule {
    /// Schedule name.
    pub name: String,
    /// Time blocks; validation guarantees they do not overlap.
    pub blocks: Vec<ResolvedScheduleBlock>,
    /// Source location of the declaration.
    pub span: Span,
}

/// A single activity block within a schedule.
#[derive(Debug, Clone, PartialEq)]
pub struct ResolvedScheduleBlock {
    /// Activity performed during this block.
    pub activity: String,
    /// Block start time.
    pub start: Time,
    /// Block end time.
    pub end: Time,
    /// Source location of the block.
    pub span: Span,
}
/// A behavior tree with validated actions
#[derive(Debug, Clone, PartialEq)]
pub struct ResolvedBehavior {
    /// Behavior name.
    pub name: String,
    /// Root node of the behavior tree.
    pub root: BehaviorNode,
    /// Source location of the declaration.
    pub span: Span,
}
/// An institution with resolved member references
#[derive(Debug, Clone, PartialEq)]
pub struct ResolvedInstitution {
    /// Institution name.
    pub name: String,
    /// Resolved field values.
    pub fields: HashMap<String, Value>,
    /// Source location of the declaration.
    pub span: Span,
}

/// A bidirectional relationship with merged self/other blocks
#[derive(Debug, Clone, PartialEq)]
pub struct ResolvedRelationship {
    /// Relationship name.
    pub name: String,
    /// The parties to the relationship.
    pub participants: Vec<Participant>,
    /// Resolved field values (self/other blocks already merged).
    pub fields: HashMap<String, Value>,
    /// Source location of the declaration.
    pub span: Span,
}
/// A location definition
#[derive(Debug, Clone, PartialEq)]
pub struct ResolvedLocation {
    /// Location name.
    pub name: String,
    /// Resolved field values.
    pub fields: HashMap<String, Value>,
    /// Source location of the declaration.
    pub span: Span,
}

/// A species definition
#[derive(Debug, Clone, PartialEq)]
pub struct ResolvedSpecies {
    /// Species name.
    pub name: String,
    /// Resolved field values.
    pub fields: HashMap<String, Value>,
    /// Source location of the declaration.
    pub span: Span,
}

/// An enum definition with variants
#[derive(Debug, Clone, PartialEq)]
pub struct ResolvedEnum {
    /// Enum name.
    pub name: String,
    /// Variant names in declaration order.
    pub variants: Vec<String>,
    /// Source location of the declaration.
    pub span: Span,
}
impl ResolvedFile {
    /// Get all characters in the file
    pub fn characters(&self) -> impl Iterator<Item = &ResolvedCharacter> {
        self.declarations.iter().filter_map(|decl| match decl {
            | ResolvedDeclaration::Character(c) => Some(c),
            | _ => None,
        })
    }

    /// Get all relationships in the file
    pub fn relationships(&self) -> impl Iterator<Item = &ResolvedRelationship> {
        self.declarations.iter().filter_map(|decl| match decl {
            | ResolvedDeclaration::Relationship(r) => Some(r),
            | _ => None,
        })
    }

    /// Get all institutions in the file
    pub fn institutions(&self) -> impl Iterator<Item = &ResolvedInstitution> {
        self.declarations.iter().filter_map(|decl| match decl {
            | ResolvedDeclaration::Institution(i) => Some(i),
            | _ => None,
        })
    }

    /// Get all schedules in the file
    pub fn schedules(&self) -> impl Iterator<Item = &ResolvedSchedule> {
        self.declarations.iter().filter_map(|decl| match decl {
            | ResolvedDeclaration::Schedule(s) => Some(s),
            | _ => None,
        })
    }

    /// Get all behavior trees in the file
    pub fn behaviors(&self) -> impl Iterator<Item = &ResolvedBehavior> {
        self.declarations.iter().filter_map(|decl| match decl {
            | ResolvedDeclaration::Behavior(b) => Some(b),
            | _ => None,
        })
    }

    /// Get all life arcs in the file
    pub fn life_arcs(&self) -> impl Iterator<Item = &ResolvedLifeArc> {
        self.declarations.iter().filter_map(|decl| match decl {
            | ResolvedDeclaration::LifeArc(la) => Some(la),
            | _ => None,
        })
    }

    /// Get all locations in the file
    pub fn locations(&self) -> impl Iterator<Item = &ResolvedLocation> {
        self.declarations.iter().filter_map(|decl| match decl {
            | ResolvedDeclaration::Location(l) => Some(l),
            | _ => None,
        })
    }

    /// Get all species in the file
    pub fn species(&self) -> impl Iterator<Item = &ResolvedSpecies> {
        self.declarations.iter().filter_map(|decl| match decl {
            | ResolvedDeclaration::Species(s) => Some(s),
            | _ => None,
        })
    }

    /// Get all enums in the file
    pub fn enums(&self) -> impl Iterator<Item = &ResolvedEnum> {
        self.declarations.iter().filter_map(|decl| match decl {
            | ResolvedDeclaration::Enum(e) => Some(e),
            | _ => None,
        })
    }

    // Name lookups: linear scans over the matching declaration kind.
    // Provided for every kind so the query API is uniform.

    /// Find a character by name
    pub fn find_character(&self, name: &str) -> Option<&ResolvedCharacter> {
        self.characters().find(|c| c.name == name)
    }

    /// Find a relationship by name
    pub fn find_relationship(&self, name: &str) -> Option<&ResolvedRelationship> {
        self.relationships().find(|r| r.name == name)
    }

    /// Find an institution by name
    pub fn find_institution(&self, name: &str) -> Option<&ResolvedInstitution> {
        self.institutions().find(|i| i.name == name)
    }

    /// Find a schedule by name
    pub fn find_schedule(&self, name: &str) -> Option<&ResolvedSchedule> {
        self.schedules().find(|s| s.name == name)
    }

    /// Find a behavior tree by name
    pub fn find_behavior(&self, name: &str) -> Option<&ResolvedBehavior> {
        self.behaviors().find(|b| b.name == name)
    }

    /// Find a life arc by name
    pub fn find_life_arc(&self, name: &str) -> Option<&ResolvedLifeArc> {
        self.life_arcs().find(|la| la.name == name)
    }

    /// Find a location by name
    pub fn find_location(&self, name: &str) -> Option<&ResolvedLocation> {
        self.locations().find(|l| l.name == name)
    }

    /// Find a species by name
    pub fn find_species(&self, name: &str) -> Option<&ResolvedSpecies> {
        self.species().find(|s| s.name == name)
    }

    /// Find an enum by name
    pub fn find_enum(&self, name: &str) -> Option<&ResolvedEnum> {
        self.enums().find(|e| e.name == name)
    }
}