Files
storybook/src/lsp/diagnostics.rs
Sienna Meridian Satterwhite 16deb5d237 release: Storybook v0.2.0 - Major syntax and features update
BREAKING CHANGES:
- Relationship syntax now requires blocks for all participants
- Removed self/other perspective blocks from relationships
- Replaced 'guard' keyword with 'if' for behavior tree decorators

Language Features:
- Add tree-sitter grammar with improved if/condition disambiguation
- Add comprehensive tutorial and reference documentation
- Add SBIR v0.2.0 binary format specification
- Add resource linking system for behaviors and schedules
- Add year-long schedule patterns (day, season, recurrence)
- Add behavior tree enhancements (named nodes, decorators)

Documentation:
- Complete tutorial series (9 chapters) with baker family examples
- Complete reference documentation for all language features
- SBIR v0.2.0 specification with binary format details
- Added locations and institutions documentation

Examples:
- Convert all examples to baker family scenario
- Add comprehensive working examples

Tooling:
- Zed extension with LSP integration
- Tree-sitter grammar for syntax highlighting
- Build scripts and development tools

Version Updates:
- Main package: 0.1.0 → 0.2.0
- Tree-sitter grammar: 0.1.0 → 0.2.0
- Zed extension: 0.1.0 → 0.2.0
- Storybook editor: 0.1.0 → 0.2.0
2026-02-13 21:52:03 +00:00

164 lines
4.3 KiB
Rust

//! Diagnostics conversion from Storybook errors to LSP diagnostics
use tower_lsp::lsp_types::{
Diagnostic,
DiagnosticSeverity,
Position,
Range,
};
use crate::syntax::lexer::{
Lexer,
Token,
};
/// Compute diagnostics for a document.
///
/// Returns an empty vector when the document parses cleanly, otherwise one
/// `Diagnostic` per detected error.
pub fn compute_diagnostics(text: &str) -> Vec<Diagnostic> {
    // Try to parse the document.
    // For now this is a simple lexer-based check - a real implementation
    // would use the full parser.
    //
    // `Err` already carries the complete list of diagnostics, so it can be
    // returned directly instead of being copied element-by-element into a
    // fresh vector.
    try_parse(text).err().unwrap_or_default()
}
/// Attempt to parse the document and return diagnostics
fn try_parse(text: &str) -> Result<(), Vec<Diagnostic>> {
// TODO: Integrate with actual parser
// For now, return Ok for all documents
// This will be implemented when we add Span tracking
// Simple placeholder: check for common syntax errors using lexer tokens
let mut errors = Vec::new();
// Track brace nesting level and position of each brace
let mut nesting_stack: Vec<usize> = Vec::new(); // Stack of opening brace positions
let lexer = Lexer::new(text);
for (offset, token, _end) in lexer {
match token {
| Token::LBrace => {
nesting_stack.push(offset);
},
| Token::RBrace => {
if nesting_stack.is_empty() {
// Unexpected closing brace - no matching opening brace
let pos = byte_offset_to_position(text, offset);
errors.push(Diagnostic {
range: Range {
start: pos,
end: Position {
line: pos.line,
character: pos.character + 1,
},
},
severity: Some(DiagnosticSeverity::ERROR),
code: None,
source: Some("storybook".to_string()),
message: "Unexpected closing brace".to_string(),
related_information: None,
tags: None,
code_description: None,
data: None,
});
} else {
nesting_stack.pop();
}
},
| _ => {},
}
}
// Note: We don't report unclosed braces (nesting_stack not empty)
// because those are common in incomplete/in-progress code
if errors.is_empty() {
Ok(())
} else {
Err(errors)
}
}
/// Return the 0-based line number containing the given byte offset.
///
/// Counts the newline bytes that appear strictly before `offset`. An offset
/// past the end of `text` resolves to the last line, matching the previous
/// char-walking implementation. Scanning raw bytes is equivalent to walking
/// chars here because `\n` is a single byte and UTF-8 continuation bytes
/// never equal 0x0A.
fn byte_offset_to_line(text: &str, offset: usize) -> usize {
    let end = offset.min(text.len());
    text.as_bytes()[..end].iter().filter(|&&b| b == b'\n').count()
}
/// Convert a byte offset to line/column position
/// This is a placeholder - will be replaced when we have proper Span tracking
///
/// `Position.character` counts UTF-16 code units, as required by the LSP
/// default position encoding ("utf-16"). Counting Unicode scalar values
/// instead would misplace diagnostics on any line containing characters
/// outside the Basic Multilingual Plane (e.g. emoji).
/// NOTE(review): if the server ever negotiates "utf-8"/"utf-32" via
/// `positionEncoding`, this must become encoding-aware - confirm the
/// capabilities handshake elsewhere in the crate.
pub fn byte_offset_to_position(text: &str, offset: usize) -> Position {
    let mut line: u32 = 0;
    let mut character: u32 = 0;
    let mut current_offset = 0;
    for ch in text.chars() {
        // Stop once we reach (or pass) the target byte offset; a target that
        // falls inside a multi-byte character resolves to that character's
        // start.
        if current_offset >= offset {
            break;
        }
        if ch == '\n' {
            line += 1;
            character = 0;
        } else {
            // 1 code unit for BMP characters, 2 for supplementary-plane ones.
            character += ch.len_utf16() as u32;
        }
        current_offset += ch.len_utf8();
    }
    Position {
        line,
        character,
    }
}
/// Build a `Diagnostic` covering the byte range `start..end` of `text`.
///
/// The byte offsets are converted to LSP line/character positions, the
/// diagnostic is tagged with the "storybook" source, and all optional
/// metadata fields (code, tags, related information, data) are left unset.
pub fn create_diagnostic(
    text: &str,
    start: usize,
    end: usize,
    message: String,
    severity: DiagnosticSeverity,
) -> Diagnostic {
    let range = Range {
        start: byte_offset_to_position(text, start),
        end: byte_offset_to_position(text, end),
    };
    Diagnostic {
        range,
        message,
        severity: Some(severity),
        source: Some("storybook".to_string()),
        code: None,
        code_description: None,
        related_information: None,
        tags: None,
        data: None,
    }
}