Files
wfe/wfe-yaml/tests/compiler.rs
Sienna Meridian Satterwhite bf252c51f0 feat(wfe-yaml): add workflow step type, cross-ref validation, cycle detection
Compiler dispatches type: workflow to SubWorkflowStep. Validation
detects circular workflow references via DFS with coloring. Cross-
workflow reference checking for multi-workflow files. Duplicate
workflow ID detection. 28 edge case tests for validation paths.
2026-03-26 14:14:39 +00:00

1016 lines
24 KiB
Rust

use std::collections::HashMap;
use std::time::Duration;
use wfe_core::models::error_behavior::ErrorBehavior;
use wfe_yaml::{load_single_workflow_from_str, load_workflow_from_str};
// A workflow containing a lone shell step must compile to exactly one
// main step, and that step must be assigned id 0.
#[test]
fn single_step_produces_one_workflow_step() {
    let yaml = r#"
workflow:
  id: single
  version: 1
  steps:
    - name: hello
      type: shell
      config:
        run: echo hello
"#;
    let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
    // Collect the ids of all steps named "hello"; there should be exactly one.
    let hello_ids: Vec<_> = compiled
        .definition
        .steps
        .iter()
        .filter(|step| step.name.as_deref() == Some("hello"))
        .map(|step| step.id)
        .collect();
    assert_eq!(hello_ids.len(), 1);
    assert_eq!(hello_ids[0], 0);
}
// Two top-level steps are chained sequentially: the first step gets a
// single outcome whose next_step is the second step's id.
#[test]
fn two_sequential_steps_wired_correctly() {
    let yaml = r#"
workflow:
  id: sequential
  version: 1
  steps:
    - name: step-a
      type: shell
      config:
        run: echo a
    - name: step-b
      type: shell
      config:
        run: echo b
"#;
    let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
    // Small lookup helper so each step is fetched the same way.
    let lookup = |wanted: &str| {
        compiled
            .definition
            .steps
            .iter()
            .find(|step| step.name.as_deref() == Some(wanted))
            .expect("step missing from compiled definition")
    };
    let first = lookup("step-a");
    let second = lookup("step-b");
    // step-a should have an outcome pointing to step-b.
    assert_eq!(first.outcomes.len(), 1);
    assert_eq!(first.outcomes[0].next_step, second.id);
}
// A `parallel:` block compiles into a single container step (a
// SequenceStep) that owns one child per parallel task.
#[test]
fn parallel_block_produces_container_with_children() {
    let yaml = r#"
workflow:
  id: parallel-wf
  version: 1
  steps:
    - name: parallel-group
      parallel:
        - name: task-a
          type: shell
          config:
            run: echo a
        - name: task-b
          type: shell
          config:
            run: echo b
"#;
    let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
    let group = compiled
        .definition
        .steps
        .iter()
        .find(|step| step.name.as_deref() == Some("parallel-group"))
        .expect("container step missing");
    assert!(
        group.step_type.contains("SequenceStep"),
        "Container should be a SequenceStep, got: {}",
        group.step_type
    );
    assert_eq!(group.children.len(), 2);
}
// An `on_failure:` hook compiles into a separate compensation step: the
// main step's error_behavior becomes Compensate and its
// compensation_step_id points at the hook step.
#[test]
fn on_failure_creates_compensation_step() {
    let yaml = r#"
workflow:
  id: compensation-wf
  version: 1
  steps:
    - name: deploy
      type: shell
      config:
        run: deploy.sh
      on_failure:
        name: rollback
        type: shell
        config:
          run: rollback.sh
"#;
    let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
    let lookup = |wanted: &str| {
        compiled
            .definition
            .steps
            .iter()
            .find(|step| step.name.as_deref() == Some(wanted))
            .expect("step missing from compiled definition")
    };
    let deploy = lookup("deploy");
    let rollback = lookup("rollback");
    assert_eq!(deploy.error_behavior, Some(ErrorBehavior::Compensate));
    // Equality against Some(rollback.id) also proves the id is populated.
    assert_eq!(deploy.compensation_step_id, Some(rollback.id));
}
// Workflow-level error_behavior maps to the definition's default, while a
// step-level error_behavior overrides it on that step only.
#[test]
fn error_behavior_maps_correctly() {
    let yaml = r#"
workflow:
  id: retry-wf
  version: 1
  error_behavior:
    type: retry
    interval: 5s
    max_retries: 10
  steps:
    - name: step1
      type: shell
      config:
        run: echo hi
      error_behavior:
        type: suspend
"#;
    let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
    let expected_default = ErrorBehavior::Retry {
        interval: Duration::from_secs(5),
        max_retries: 10,
    };
    assert_eq!(compiled.definition.default_error_behavior, expected_default);
    let step1 = compiled
        .definition
        .steps
        .iter()
        .find(|step| step.name.as_deref() == Some("step1"))
        .expect("step1 missing");
    assert_eq!(step1.error_behavior, Some(ErrorBehavior::Suspend));
}
// A YAML anchor on one step's config can be aliased by a later step; both
// steps end up with a step_config and the alias inherits every field
// (run command and shell) from the anchored mapping.
#[test]
fn anchors_compile_correctly() {
    let yaml = r#"
workflow:
  id: anchor-wf
  version: 1
  steps:
    - name: build
      type: shell
      config: &default_config
        shell: bash
        timeout: 5m
        run: cargo build
    - name: test
      type: shell
      config: *default_config
"#;
    let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
    let lookup = |wanted: &str| {
        compiled
            .definition
            .steps
            .iter()
            .find(|step| step.name.as_deref() == Some(wanted))
            .expect("step missing from compiled definition")
    };
    let build = lookup("build");
    let test = lookup("test");
    // Both steps must carry a serialized config.
    assert!(build.step_config.is_some());
    assert!(test.step_config.is_some());
    // Sequential wiring: build -> test.
    assert_eq!(build.outcomes.len(), 1);
    assert_eq!(build.outcomes[0].next_step, test.id);
    // The alias shares the anchored config, so "test" runs `cargo build`
    // under bash exactly like "build" does.
    let aliased: wfe_yaml::executors::shell::ShellConfig =
        serde_json::from_value(test.step_config.clone().unwrap()).unwrap();
    assert_eq!(aliased.run, "cargo build");
    assert_eq!(aliased.shell, "bash", "shell should be inherited from YAML anchor alias");
}
// An `on_success:` hook becomes its own step, reached from the main step
// via an outcome labelled "success".
#[test]
fn on_success_creates_step_wired_after_main() {
    let yaml = r#"
workflow:
  id: success-hook-wf
  version: 1
  steps:
    - name: build
      type: shell
      config:
        run: cargo build
      on_success:
        name: notify
        type: shell
        config:
          run: echo "build succeeded"
"#;
    let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
    let lookup = |wanted: &str| {
        compiled
            .definition
            .steps
            .iter()
            .find(|step| step.name.as_deref() == Some(wanted))
            .expect("step missing from compiled definition")
    };
    let build = lookup("build");
    let notify = lookup("notify");
    assert_eq!(build.outcomes.len(), 1);
    let outcome = &build.outcomes[0];
    assert_eq!(outcome.next_step, notify.id);
    assert_eq!(outcome.label.as_deref(), Some("success"));
}
// An `ensure:` hook becomes its own step, reached from the main step via
// an outcome labelled "ensure".
#[test]
fn ensure_creates_step_wired_after_main() {
    let yaml = r#"
workflow:
  id: ensure-wf
  version: 1
  steps:
    - name: deploy
      type: shell
      config:
        run: deploy.sh
      ensure:
        name: cleanup
        type: shell
        config:
          run: cleanup.sh
"#;
    let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
    let lookup = |wanted: &str| {
        compiled
            .definition
            .steps
            .iter()
            .find(|step| step.name.as_deref() == Some(wanted))
            .expect("step missing from compiled definition")
    };
    let deploy = lookup("deploy");
    let cleanup = lookup("cleanup");
    assert_eq!(deploy.outcomes.len(), 1);
    let outcome = &deploy.outcomes[0];
    assert_eq!(outcome.next_step, cleanup.id);
    assert_eq!(outcome.label.as_deref(), Some("ensure"));
}
// When a step declares both on_success and ensure, the main step keeps
// only the "success" outcome; the ensure hook does not add a second
// outcome to the main step.
#[test]
fn ensure_not_wired_when_on_success_present() {
    let yaml = r#"
workflow:
  id: both-hooks-wf
  version: 1
  steps:
    - name: deploy
      type: shell
      config:
        run: deploy.sh
      on_success:
        name: notify
        type: shell
        config:
          run: echo ok
      ensure:
        name: cleanup
        type: shell
        config:
          run: cleanup.sh
"#;
    let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
    let deploy = compiled
        .definition
        .steps
        .iter()
        .find(|step| step.name.as_deref() == Some("deploy"))
        .expect("deploy step missing");
    assert_eq!(deploy.outcomes.len(), 1);
    assert_eq!(deploy.outcomes[0].label.as_deref(), Some("success"));
}
// `error_behavior: terminate` at the workflow level maps to
// ErrorBehavior::Terminate as the definition default.
#[test]
fn error_behavior_terminate_maps_correctly() {
    let yaml = r#"
workflow:
  id: terminate-wf
  version: 1
  error_behavior:
    type: terminate
  steps:
    - name: step1
      type: shell
      config:
        run: echo hi
"#;
    let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
    assert!(matches!(
        compiled.definition.default_error_behavior,
        ErrorBehavior::Terminate
    ));
}
// `error_behavior: compensate` at the workflow level maps to
// ErrorBehavior::Compensate as the definition default.
#[test]
fn error_behavior_compensate_maps_correctly() {
    let yaml = r#"
workflow:
  id: compensate-wf
  version: 1
  error_behavior:
    type: compensate
  steps:
    - name: step1
      type: shell
      config:
        run: echo hi
"#;
    let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
    assert!(matches!(
        compiled.definition.default_error_behavior,
        ErrorBehavior::Compensate
    ));
}
// `type: retry` with no interval/max_retries falls back to the built-in
// defaults: a 60-second interval and 3 retries.
#[test]
fn error_behavior_retry_defaults() {
    let yaml = r#"
workflow:
  id: retry-defaults-wf
  version: 1
  error_behavior:
    type: retry
  steps:
    - name: step1
      type: shell
      config:
        run: echo hi
"#;
    let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
    let expected = ErrorBehavior::Retry {
        interval: Duration::from_secs(60),
        max_retries: 3,
    };
    assert_eq!(compiled.definition.default_error_behavior, expected);
}
// An interval written with an `m` suffix is parsed as minutes.
#[test]
fn error_behavior_retry_with_minute_interval() {
    let yaml = r#"
workflow:
  id: retry-min-wf
  version: 1
  error_behavior:
    type: retry
    interval: 2m
    max_retries: 5
  steps:
    - name: step1
      type: shell
      config:
        run: echo hi
"#;
    let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
    // 2 minutes == 120 seconds.
    let expected = ErrorBehavior::Retry {
        interval: Duration::from_secs(120),
        max_retries: 5,
    };
    assert_eq!(compiled.definition.default_error_behavior, expected);
}
// An interval given as a bare number (no unit suffix) is interpreted as
// milliseconds.
#[test]
fn error_behavior_retry_with_raw_number_interval() {
    let yaml = r#"
workflow:
  id: retry-raw-wf
  version: 1
  error_behavior:
    type: retry
    interval: "500"
    max_retries: 2
  steps:
    - name: step1
      type: shell
      config:
        run: echo hi
"#;
    let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
    let expected = ErrorBehavior::Retry {
        interval: Duration::from_millis(500),
        max_retries: 2,
    };
    assert_eq!(compiled.definition.default_error_behavior, expected);
}
// An unrecognized error_behavior type must fail compilation, and the
// error message should name the offending type so the user can find it.
#[test]
fn unknown_error_behavior_returns_error() {
    let yaml = r#"
workflow:
  id: bad-eb-wf
  version: 1
  error_behavior:
    type: explode
  steps:
    - name: step1
      type: shell
      config:
        run: echo hi
"#;
    // `.err().expect(..)` asserts failure and extracts the error in one
    // step, replacing the manual match-and-panic (and it avoids
    // `unwrap_err`'s Debug bound on the Ok type).
    let err = load_single_workflow_from_str(yaml, &HashMap::new())
        .err()
        .expect("compilation should fail for an unknown error_behavior type")
        .to_string();
    assert!(err.contains("explode"), "Error should mention the invalid type, got: {err}");
}
// Every child of a parallel container must end up with its own serialized
// step_config, and a factory must be registered for each child.
#[test]
fn parallel_block_children_have_step_configs() {
    let yaml = r#"
workflow:
  id: parallel-config-wf
  version: 1
  steps:
    - name: parallel-group
      parallel:
        - name: task-a
          type: shell
          config:
            run: echo a
        - name: task-b
          type: shell
          config:
            run: echo b
        - name: task-c
          type: shell
          config:
            run: echo c
"#;
    let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
    let lookup = |wanted: &str| {
        compiled
            .definition
            .steps
            .iter()
            .find(|step| step.name.as_deref() == Some(wanted))
            .expect("step missing from compiled definition")
    };
    assert_eq!(lookup("parallel-group").children.len(), 3);
    // Each child carries its own config payload.
    for task in ["task-a", "task-b", "task-c"] {
        assert!(
            lookup(task).step_config.is_some(),
            "Child {task} should have step_config"
        );
    }
    // Factories should include entries for all 3 children.
    assert!(compiled.step_factories.len() >= 3);
}
// The full shell config (run, shell, timeout, env, working_dir) must
// round-trip through the serialized step_config into ShellConfig.
#[test]
fn step_config_serializes_shell_config() {
    let yaml = r#"
workflow:
  id: config-ser-wf
  version: 1
  steps:
    - name: build
      type: shell
      config:
        run: cargo build
        shell: bash
        timeout: 30s
        env:
          RUST_LOG: debug
        working_dir: /tmp
"#;
    let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
    let build = compiled
        .definition
        .steps
        .iter()
        .find(|step| step.name.as_deref() == Some("build"))
        .expect("build step missing");
    let raw = build.step_config.clone().expect("build should carry a config");
    let cfg: wfe_yaml::executors::shell::ShellConfig = serde_json::from_value(raw).unwrap();
    assert_eq!(cfg.run, "cargo build");
    assert_eq!(cfg.shell, "bash");
    // `30s` is normalized to milliseconds.
    assert_eq!(cfg.timeout_ms, Some(30_000));
    assert_eq!(cfg.env.get("RUST_LOG").unwrap(), "debug");
    assert_eq!(cfg.working_dir.as_deref(), Some("/tmp"));
}
// A config with a `file:` field (instead of `run:`) compiles to a run
// command that invokes the script via `sh`.
#[test]
fn config_file_field_generates_run_command() {
    let yaml = r#"
workflow:
  id: file-wf
  version: 1
  steps:
    - name: run-script
      type: shell
      config:
        file: my_script.sh
"#;
    let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
    let script_step = compiled
        .definition
        .steps
        .iter()
        .find(|step| step.name.as_deref() == Some("run-script"))
        .expect("run-script step missing");
    let cfg: wfe_yaml::executors::shell::ShellConfig =
        serde_json::from_value(script_step.step_config.clone().unwrap()).unwrap();
    assert_eq!(cfg.run, "sh my_script.sh");
}
// When no shell is specified in the config, the compiled ShellConfig
// defaults to `sh`.
#[test]
fn default_shell_is_sh() {
    let yaml = r#"
workflow:
  id: default-shell-wf
  version: 1
  steps:
    - name: step1
      type: shell
      config:
        run: echo hello
"#;
    let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
    let step1 = compiled
        .definition
        .steps
        .iter()
        .find(|step| step.name.as_deref() == Some("step1"))
        .expect("step1 missing");
    let cfg: wfe_yaml::executors::shell::ShellConfig =
        serde_json::from_value(step1.step_config.clone().unwrap()).unwrap();
    assert_eq!(cfg.shell, "sh", "Default shell should be 'sh'");
}
// Both the main step and its on_failure hook must get a registered step
// factory.
#[test]
fn on_failure_factory_is_registered() {
    let yaml = r#"
workflow:
  id: factory-wf
  version: 1
  steps:
    - name: deploy
      type: shell
      config:
        run: deploy.sh
      on_failure:
        name: rollback
        type: shell
        config:
          run: rollback.sh
"#;
    let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
    // True when any factory key mentions the given step name.
    let registered = |needle: &str| {
        compiled
            .step_factories
            .iter()
            .any(|(key, _)| key.contains(needle))
    };
    assert!(registered("deploy"), "Should have factory for deploy step");
    assert!(registered("rollback"), "Should have factory for rollback step");
}
// The on_success hook step must get a registered step factory.
#[test]
fn on_success_factory_is_registered() {
    let yaml = r#"
workflow:
  id: success-factory-wf
  version: 1
  steps:
    - name: build
      type: shell
      config:
        run: cargo build
      on_success:
        name: notify
        type: shell
        config:
          run: echo ok
"#;
    let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
    let registered = |needle: &str| {
        compiled
            .step_factories
            .iter()
            .any(|(key, _)| key.contains(needle))
    };
    assert!(registered("notify"), "Should have factory for on_success step");
}
// The ensure hook step must get a registered step factory.
#[test]
fn ensure_factory_is_registered() {
    let yaml = r#"
workflow:
  id: ensure-factory-wf
  version: 1
  steps:
    - name: deploy
      type: shell
      config:
        run: deploy.sh
      ensure:
        name: cleanup
        type: shell
        config:
          run: cleanup.sh
"#;
    let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
    let registered = |needle: &str| {
        compiled
            .step_factories
            .iter()
            .any(|(key, _)| key.contains(needle))
    };
    assert!(registered("cleanup"), "Should have factory for ensure step");
}
// An error_behavior declared on a parallel block is attached to the
// compiled container step itself.
#[test]
fn parallel_with_error_behavior_on_container() {
    let yaml = r#"
workflow:
  id: parallel-eb-wf
  version: 1
  steps:
    - name: parallel-group
      error_behavior:
        type: terminate
      parallel:
        - name: task-a
          type: shell
          config:
            run: echo a
        - name: task-b
          type: shell
          config:
            run: echo b
"#;
    let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
    let group = compiled
        .definition
        .steps
        .iter()
        .find(|step| step.name.as_deref() == Some("parallel-group"))
        .expect("container step missing");
    assert_eq!(group.error_behavior, Some(ErrorBehavior::Terminate));
}
// When step A has an on_success hook, the chain is A -> hook -> B: the
// hook (not A) carries the sequential edge to the next main step.
#[test]
fn sequential_wiring_with_hooks() {
    let yaml = r#"
workflow:
  id: hook-wiring-wf
  version: 1
  steps:
    - name: step-a
      type: shell
      config:
        run: echo a
      on_success:
        name: hook-a
        type: shell
        config:
          run: echo hook
    - name: step-b
      type: shell
      config:
        run: echo b
"#;
    let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
    let lookup = |wanted: &str| {
        compiled
            .definition
            .steps
            .iter()
            .find(|step| step.name.as_deref() == Some(wanted))
            .expect("step missing from compiled definition")
    };
    let step_a = lookup("step-a");
    let hook_a = lookup("hook-a");
    let step_b = lookup("step-b");
    // step-a -> hook-a via the on_success outcome.
    assert_eq!(step_a.outcomes[0].next_step, hook_a.id);
    // hook-a -> step-b via sequential wiring.
    assert_eq!(hook_a.outcomes.len(), 1);
    assert_eq!(hook_a.outcomes[0].next_step, step_b.id);
}
// A workflow-level description is carried through to the compiled
// definition.
#[test]
fn workflow_description_is_set() {
    let yaml = r#"
workflow:
  id: desc-wf
  version: 1
  description: "A test workflow"
  steps:
    - name: step1
      type: shell
      config:
        run: echo hi
"#;
    let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
    let description = compiled.definition.description.as_deref();
    assert_eq!(description, Some("A test workflow"));
}
// A shell step without its mandatory `config:` section must fail to
// compile, and the error should point the user at the missing section.
#[test]
fn missing_config_section_returns_error() {
    let yaml = r#"
workflow:
  id: no-config-wf
  version: 1
  steps:
    - name: bad-step
      type: shell
"#;
    // `.err().expect(..)` asserts failure and extracts the error in one
    // step, replacing the manual match-and-panic (and it avoids
    // `unwrap_err`'s Debug bound on the Ok type).
    let err = load_single_workflow_from_str(yaml, &HashMap::new())
        .err()
        .expect("compilation should fail without a config section")
        .to_string();
    assert!(
        err.contains("config"),
        "Error should mention missing config, got: {err}"
    );
}
// --- Workflow step compilation tests ---
// `type: workflow` compiles into a sub-workflow step whose serialized
// config carries the target workflow id, its version, and output keys.
#[test]
fn workflow_step_compiles_correctly() {
    let yaml = r#"
workflow:
  id: parent-wf
  version: 1
  steps:
    - name: run-child
      type: workflow
      config:
        workflow: child-wf
        workflow_version: 3
        outputs:
          - name: result
          - name: status
"#;
    let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
    let child_step = compiled
        .definition
        .steps
        .iter()
        .find(|step| step.name.as_deref() == Some("run-child"))
        .expect("run-child step missing");
    assert!(child_step.step_type.contains("workflow"));
    // The serialized config must carry the target id, version and outputs.
    let cfg = child_step.step_config.clone().expect("workflow step needs a config");
    assert_eq!(cfg["workflow_id"].as_str(), Some("child-wf"));
    assert_eq!(cfg["version"].as_u64(), Some(3));
    let outputs = cfg["output_keys"].as_array().expect("output_keys should be an array");
    assert_eq!(outputs.len(), 2);
}
// When no workflow_version is given, the sub-workflow config defaults to
// version 1.
#[test]
fn workflow_step_version_defaults_to_1() {
    let yaml = r#"
workflow:
  id: parent-wf
  version: 1
  steps:
    - name: run-child
      type: workflow
      config:
        workflow: child-wf
"#;
    let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
    let child_step = compiled
        .definition
        .steps
        .iter()
        .find(|step| step.name.as_deref() == Some("run-child"))
        .expect("run-child step missing");
    let cfg = child_step.step_config.clone().expect("workflow step needs a config");
    assert_eq!(cfg["version"].as_u64(), Some(1));
}
// A `type: workflow` step must register a factory whose key mentions both
// "workflow" and the step name.
#[test]
fn workflow_step_factory_is_registered() {
    let yaml = r#"
workflow:
  id: parent-wf
  version: 1
  steps:
    - name: run-child
      type: workflow
      config:
        workflow: child-wf
"#;
    let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
    let found = compiled.step_factories.iter().any(|(key, _)| {
        key.contains("workflow") && key.contains("run-child")
    });
    assert!(found, "Should have factory for workflow step");
}
// A `workflows:` file compiles to one CompiledWorkflow per entry, in
// declaration order.
#[test]
fn compile_multi_workflow_file() {
    let yaml = r#"
workflows:
  - id: build
    version: 1
    steps:
      - name: compile
        type: shell
        config:
          run: cargo build
  - id: test
    version: 1
    steps:
      - name: run-tests
        type: shell
        config:
          run: cargo test
"#;
    let workflows = load_workflow_from_str(yaml, &HashMap::new()).unwrap();
    let expected_ids = ["build", "test"];
    assert_eq!(workflows.len(), expected_ids.len());
    for (wf, want) in workflows.iter().zip(expected_ids) {
        assert_eq!(wf.definition.id, want);
    }
}
// A multi-workflow file where one workflow references another via a
// `type: workflow` step compiles cleanly, and the referencing step keeps
// its workflow step_type.
#[test]
fn compile_multi_workflow_with_cross_references() {
    let yaml = r#"
workflows:
  - id: pipeline
    version: 1
    steps:
      - name: run-build
        type: workflow
        config:
          workflow: build
  - id: build
    version: 1
    steps:
      - name: compile
        type: shell
        config:
          run: cargo build
"#;
    let workflows = load_workflow_from_str(yaml, &HashMap::new()).unwrap();
    assert_eq!(workflows.len(), 2);
    // First entry is the pipeline; its run-build step is a workflow step.
    let run_build = workflows[0]
        .definition
        .steps
        .iter()
        .find(|step| step.name.as_deref() == Some("run-build"))
        .expect("run-build step missing");
    assert!(run_build.step_type.contains("workflow"));
}
// Shell and workflow steps can be mixed in one sequence; all three steps
// are present and wired setup -> run-child -> cleanup.
#[test]
fn workflow_step_with_mixed_steps() {
    let yaml = r#"
workflow:
  id: mixed-wf
  version: 1
  steps:
    - name: setup
      type: shell
      config:
        run: echo setup
    - name: run-child
      type: workflow
      config:
        workflow: child-wf
    - name: cleanup
      type: shell
      config:
        run: echo cleanup
"#;
    let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
    let lookup = |wanted: &str| {
        compiled
            .definition
            .steps
            .iter()
            .find(|step| step.name.as_deref() == Some(wanted))
            .expect("step missing from compiled definition")
    };
    // All three main steps exist (lookup panics otherwise).
    let setup = lookup("setup");
    let run_child = lookup("run-child");
    let cleanup = lookup("cleanup");
    // Sequential wiring: setup -> run-child -> cleanup.
    assert_eq!(setup.outcomes[0].next_step, run_child.id);
    assert_eq!(run_child.outcomes[0].next_step, cleanup.id);
}