test(wfe-yaml): coverage pass to 90%+ and fix duration parsing bug

Added 51 tests: compiler hooks/parallel/error behavior (20),
validation error paths (15), shell integration tests (7),
lib.rs file loading (5), interpolation edge cases (4).

Fixed parse_duration_ms: "ms" suffix was unreachable because
strip_suffix('s') matched first. Now checks "ms" before "s".

Coverage: 40% → 90.3%. 326 total workspace tests.
This commit is contained in:
2026-03-25 21:42:26 +00:00
parent b89563af63
commit ce68e4beed
6 changed files with 1304 additions and 3 deletions

View File

@@ -277,12 +277,13 @@ fn build_shell_config(step: &YamlStep) -> Result<ShellConfig, YamlWorkflowError>
fn parse_duration_ms(s: &str) -> Option<u64> {
let s = s.trim();
if let Some(secs) = s.strip_suffix('s') {
// Check "ms" before "s" since strip_suffix('s') would also match "500ms"
if let Some(ms) = s.strip_suffix("ms") {
ms.trim().parse::<u64>().ok()
} else if let Some(secs) = s.strip_suffix('s') {
secs.trim().parse::<u64>().ok().map(|v| v * 1000)
} else if let Some(mins) = s.strip_suffix('m') {
mins.trim().parse::<u64>().ok().map(|v| v * 60 * 1000)
} else if let Some(ms) = s.strip_suffix("ms") {
ms.trim().parse::<u64>().ok()
} else {
s.parse::<u64>().ok()
}

View File

@@ -223,3 +223,592 @@ workflow:
assert_eq!(test_config.run, "cargo build");
assert_eq!(test_config.shell, "bash", "shell should be inherited from YAML anchor alias");
}
#[test]
fn on_success_creates_step_wired_after_main() {
let yaml = r#"
workflow:
id: success-hook-wf
version: 1
steps:
- name: build
type: shell
config:
run: cargo build
on_success:
name: notify
type: shell
config:
run: echo "build succeeded"
"#;
let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap();
let build = compiled
.definition
.steps
.iter()
.find(|s| s.name.as_deref() == Some("build"))
.unwrap();
let notify = compiled
.definition
.steps
.iter()
.find(|s| s.name.as_deref() == Some("notify"))
.unwrap();
// build should have an outcome pointing to notify with label "success".
assert_eq!(build.outcomes.len(), 1);
assert_eq!(build.outcomes[0].next_step, notify.id);
assert_eq!(build.outcomes[0].label.as_deref(), Some("success"));
}
#[test]
fn ensure_creates_step_wired_after_main() {
let yaml = r#"
workflow:
id: ensure-wf
version: 1
steps:
- name: deploy
type: shell
config:
run: deploy.sh
ensure:
name: cleanup
type: shell
config:
run: cleanup.sh
"#;
let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap();
let deploy = compiled
.definition
.steps
.iter()
.find(|s| s.name.as_deref() == Some("deploy"))
.unwrap();
let cleanup = compiled
.definition
.steps
.iter()
.find(|s| s.name.as_deref() == Some("cleanup"))
.unwrap();
// deploy should have an outcome pointing to cleanup with label "ensure".
assert_eq!(deploy.outcomes.len(), 1);
assert_eq!(deploy.outcomes[0].next_step, cleanup.id);
assert_eq!(deploy.outcomes[0].label.as_deref(), Some("ensure"));
}
#[test]
fn ensure_not_wired_when_on_success_present() {
let yaml = r#"
workflow:
id: both-hooks-wf
version: 1
steps:
- name: deploy
type: shell
config:
run: deploy.sh
on_success:
name: notify
type: shell
config:
run: echo ok
ensure:
name: cleanup
type: shell
config:
run: cleanup.sh
"#;
let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap();
let deploy = compiled
.definition
.steps
.iter()
.find(|s| s.name.as_deref() == Some("deploy"))
.unwrap();
// When on_success is present, ensure should NOT add another outcome to the main step.
// Only the on_success outcome should be there.
assert_eq!(deploy.outcomes.len(), 1);
assert_eq!(deploy.outcomes[0].label.as_deref(), Some("success"));
}
#[test]
fn error_behavior_terminate_maps_correctly() {
let yaml = r#"
workflow:
id: terminate-wf
version: 1
error_behavior:
type: terminate
steps:
- name: step1
type: shell
config:
run: echo hi
"#;
let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap();
assert_eq!(
compiled.definition.default_error_behavior,
ErrorBehavior::Terminate
);
}
#[test]
fn error_behavior_compensate_maps_correctly() {
let yaml = r#"
workflow:
id: compensate-wf
version: 1
error_behavior:
type: compensate
steps:
- name: step1
type: shell
config:
run: echo hi
"#;
let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap();
assert_eq!(
compiled.definition.default_error_behavior,
ErrorBehavior::Compensate
);
}
#[test]
fn error_behavior_retry_defaults() {
// retry with no interval or max_retries should use defaults.
let yaml = r#"
workflow:
id: retry-defaults-wf
version: 1
error_behavior:
type: retry
steps:
- name: step1
type: shell
config:
run: echo hi
"#;
let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap();
assert_eq!(
compiled.definition.default_error_behavior,
ErrorBehavior::Retry {
interval: Duration::from_secs(60),
max_retries: 3,
}
);
}
#[test]
fn error_behavior_retry_with_minute_interval() {
let yaml = r#"
workflow:
id: retry-min-wf
version: 1
error_behavior:
type: retry
interval: 2m
max_retries: 5
steps:
- name: step1
type: shell
config:
run: echo hi
"#;
let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap();
assert_eq!(
compiled.definition.default_error_behavior,
ErrorBehavior::Retry {
interval: Duration::from_millis(2 * 60 * 1000),
max_retries: 5,
}
);
}
#[test]
fn error_behavior_retry_with_raw_number_interval() {
// When interval is a raw number (no suffix), it is treated as milliseconds.
let yaml = r#"
workflow:
id: retry-raw-wf
version: 1
error_behavior:
type: retry
interval: "500"
max_retries: 2
steps:
- name: step1
type: shell
config:
run: echo hi
"#;
let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap();
assert_eq!(
compiled.definition.default_error_behavior,
ErrorBehavior::Retry {
interval: Duration::from_millis(500),
max_retries: 2,
}
);
}
#[test]
fn unknown_error_behavior_returns_error() {
let yaml = r#"
workflow:
id: bad-eb-wf
version: 1
error_behavior:
type: explode
steps:
- name: step1
type: shell
config:
run: echo hi
"#;
let result = load_workflow_from_str(yaml, &HashMap::new());
assert!(result.is_err());
let err = match result { Err(e) => e.to_string(), Ok(_) => panic!("expected error") };
assert!(err.contains("explode"), "Error should mention the invalid type, got: {err}");
}
#[test]
fn parallel_block_children_have_step_configs() {
let yaml = r#"
workflow:
id: parallel-config-wf
version: 1
steps:
- name: parallel-group
parallel:
- name: task-a
type: shell
config:
run: echo a
- name: task-b
type: shell
config:
run: echo b
- name: task-c
type: shell
config:
run: echo c
"#;
let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap();
let container = compiled
.definition
.steps
.iter()
.find(|s| s.name.as_deref() == Some("parallel-group"))
.unwrap();
assert_eq!(container.children.len(), 3);
// Each child should have a step_config.
for child_name in &["task-a", "task-b", "task-c"] {
let child = compiled
.definition
.steps
.iter()
.find(|s| s.name.as_deref() == Some(*child_name))
.unwrap();
assert!(child.step_config.is_some(), "Child {child_name} should have step_config");
}
// Factories should include entries for all 3 children.
assert!(compiled.step_factories.len() >= 3);
}
#[test]
fn step_config_serializes_shell_config() {
let yaml = r#"
workflow:
id: config-ser-wf
version: 1
steps:
- name: build
type: shell
config:
run: cargo build
shell: bash
timeout: 30s
env:
RUST_LOG: debug
working_dir: /tmp
"#;
let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap();
let step = compiled
.definition
.steps
.iter()
.find(|s| s.name.as_deref() == Some("build"))
.unwrap();
let config: wfe_yaml::executors::shell::ShellConfig =
serde_json::from_value(step.step_config.clone().unwrap()).unwrap();
assert_eq!(config.run, "cargo build");
assert_eq!(config.shell, "bash");
assert_eq!(config.timeout_ms, Some(30_000));
assert_eq!(config.env.get("RUST_LOG").unwrap(), "debug");
assert_eq!(config.working_dir.as_deref(), Some("/tmp"));
}
#[test]
fn config_file_field_generates_run_command() {
let yaml = r#"
workflow:
id: file-wf
version: 1
steps:
- name: run-script
type: shell
config:
file: my_script.sh
"#;
let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap();
let step = compiled
.definition
.steps
.iter()
.find(|s| s.name.as_deref() == Some("run-script"))
.unwrap();
let config: wfe_yaml::executors::shell::ShellConfig =
serde_json::from_value(step.step_config.clone().unwrap()).unwrap();
assert_eq!(config.run, "sh my_script.sh");
}
#[test]
fn default_shell_is_sh() {
let yaml = r#"
workflow:
id: default-shell-wf
version: 1
steps:
- name: step1
type: shell
config:
run: echo hello
"#;
let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap();
let step = compiled
.definition
.steps
.iter()
.find(|s| s.name.as_deref() == Some("step1"))
.unwrap();
let config: wfe_yaml::executors::shell::ShellConfig =
serde_json::from_value(step.step_config.clone().unwrap()).unwrap();
assert_eq!(config.shell, "sh", "Default shell should be 'sh'");
}
#[test]
fn on_failure_factory_is_registered() {
let yaml = r#"
workflow:
id: factory-wf
version: 1
steps:
- name: deploy
type: shell
config:
run: deploy.sh
on_failure:
name: rollback
type: shell
config:
run: rollback.sh
"#;
let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap();
// Should have factories for both deploy and rollback.
let has_deploy = compiled
.step_factories
.iter()
.any(|(key, _)| key.contains("deploy"));
let has_rollback = compiled
.step_factories
.iter()
.any(|(key, _)| key.contains("rollback"));
assert!(has_deploy, "Should have factory for deploy step");
assert!(has_rollback, "Should have factory for rollback step");
}
#[test]
fn on_success_factory_is_registered() {
let yaml = r#"
workflow:
id: success-factory-wf
version: 1
steps:
- name: build
type: shell
config:
run: cargo build
on_success:
name: notify
type: shell
config:
run: echo ok
"#;
let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap();
let has_notify = compiled
.step_factories
.iter()
.any(|(key, _)| key.contains("notify"));
assert!(has_notify, "Should have factory for on_success step");
}
#[test]
fn ensure_factory_is_registered() {
let yaml = r#"
workflow:
id: ensure-factory-wf
version: 1
steps:
- name: deploy
type: shell
config:
run: deploy.sh
ensure:
name: cleanup
type: shell
config:
run: cleanup.sh
"#;
let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap();
let has_cleanup = compiled
.step_factories
.iter()
.any(|(key, _)| key.contains("cleanup"));
assert!(has_cleanup, "Should have factory for ensure step");
}
#[test]
fn parallel_with_error_behavior_on_container() {
let yaml = r#"
workflow:
id: parallel-eb-wf
version: 1
steps:
- name: parallel-group
error_behavior:
type: terminate
parallel:
- name: task-a
type: shell
config:
run: echo a
- name: task-b
type: shell
config:
run: echo b
"#;
let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap();
let container = compiled
.definition
.steps
.iter()
.find(|s| s.name.as_deref() == Some("parallel-group"))
.unwrap();
assert_eq!(container.error_behavior, Some(ErrorBehavior::Terminate));
}
#[test]
fn sequential_wiring_with_hooks() {
// When step A has on_success hook, the hook should wire to step B.
let yaml = r#"
workflow:
id: hook-wiring-wf
version: 1
steps:
- name: step-a
type: shell
config:
run: echo a
on_success:
name: hook-a
type: shell
config:
run: echo hook
- name: step-b
type: shell
config:
run: echo b
"#;
let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap();
let step_a = compiled
.definition
.steps
.iter()
.find(|s| s.name.as_deref() == Some("step-a"))
.unwrap();
let hook_a = compiled
.definition
.steps
.iter()
.find(|s| s.name.as_deref() == Some("hook-a"))
.unwrap();
let step_b = compiled
.definition
.steps
.iter()
.find(|s| s.name.as_deref() == Some("step-b"))
.unwrap();
// step-a -> hook-a (via on_success outcome).
assert_eq!(step_a.outcomes[0].next_step, hook_a.id);
// hook-a -> step-b (sequential wiring).
assert_eq!(hook_a.outcomes.len(), 1);
assert_eq!(hook_a.outcomes[0].next_step, step_b.id);
}
#[test]
fn workflow_description_is_set() {
let yaml = r#"
workflow:
id: desc-wf
version: 1
description: "A test workflow"
steps:
- name: step1
type: shell
config:
run: echo hi
"#;
let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap();
assert_eq!(
compiled.definition.description.as_deref(),
Some("A test workflow")
);
}
#[test]
fn missing_config_section_returns_error() {
let yaml = r#"
workflow:
id: no-config-wf
version: 1
steps:
- name: bad-step
type: shell
"#;
let result = load_workflow_from_str(yaml, &HashMap::new());
assert!(result.is_err());
let err = match result { Err(e) => e.to_string(), Ok(_) => panic!("expected error") };
assert!(
err.contains("config"),
"Error should mention missing config, got: {err}"
);
}

View File

@@ -75,3 +75,44 @@ merged:
assert!(result.contains("&default"));
assert!(result.contains("*default"));
}
#[test]
fn null_value_interpolated_as_null_string() {
let mut config = HashMap::new();
config.insert("val".to_string(), serde_json::Value::Null);
let result = interpolate("value: ((val))", &config).unwrap();
assert_eq!(result, "value: null");
}
#[test]
fn boolean_value_interpolated() {
let mut config = HashMap::new();
config.insert("flag".to_string(), serde_json::json!(true));
let result = interpolate("enabled: ((flag))", &config).unwrap();
assert_eq!(result, "enabled: true");
}
#[test]
fn nested_path_unresolved_segment_returns_error() {
let mut config = HashMap::new();
config.insert(
"config".to_string(),
serde_json::json!({"database": {"host": "localhost"}}),
);
let result = interpolate("port: ((config.database.port))", &config);
assert!(result.is_err());
let err = result.unwrap_err().to_string();
assert!(err.contains("config.database.port"));
}
#[test]
fn numeric_value_interpolated() {
let mut config = HashMap::new();
config.insert("count".to_string(), serde_json::json!(42));
let result = interpolate("count: ((count))", &config).unwrap();
assert_eq!(result, "count: 42");
}

104
wfe-yaml/tests/lib_tests.rs Normal file
View File

@@ -0,0 +1,104 @@
use std::collections::HashMap;
use std::io::Write;
use wfe_yaml::{load_workflow, load_workflow_from_str};
#[test]
fn load_workflow_from_file() {
let yaml = r#"
workflow:
id: file-load-wf
version: 1
steps:
- name: hello
type: shell
config:
run: echo hello
"#;
// Write to a temp file manually.
let path = std::path::Path::new("/tmp/wfe_test_load_workflow.yaml");
let mut file = std::fs::File::create(path).unwrap();
file.write_all(yaml.as_bytes()).unwrap();
file.flush().unwrap();
let compiled = load_workflow(path, &HashMap::new()).unwrap();
assert_eq!(compiled.definition.id, "file-load-wf");
assert_eq!(compiled.definition.version, 1);
assert_eq!(compiled.definition.steps.len(), 1);
// Clean up.
let _ = std::fs::remove_file(path);
}
#[test]
fn load_workflow_from_nonexistent_file_returns_error() {
let path = std::path::Path::new("/tmp/nonexistent_wfe_test_file_12345.yaml");
let result = load_workflow(path, &HashMap::new());
assert!(result.is_err());
let err = match result { Err(e) => e.to_string(), Ok(_) => panic!("expected error") };
assert!(
err.contains("IO error") || err.contains("No such file"),
"Expected IO error, got: {err}"
);
}
#[test]
fn load_workflow_from_str_with_invalid_yaml_returns_error() {
let yaml = "this is not valid yaml: [[[";
let result = load_workflow_from_str(yaml, &HashMap::new());
assert!(result.is_err());
let err = match result { Err(e) => e.to_string(), Ok(_) => panic!("expected error") };
assert!(
err.contains("YAML parse error"),
"Expected YAML parse error, got: {err}"
);
}
#[test]
fn load_workflow_from_str_with_interpolation() {
let yaml = r#"
workflow:
id: interp-wf
version: 1
steps:
- name: greet
type: shell
config:
run: echo ((message))
"#;
let mut config = HashMap::new();
config.insert("message".to_string(), serde_json::json!("hello world"));
let compiled = load_workflow_from_str(yaml, &config).unwrap();
let step = compiled
.definition
.steps
.iter()
.find(|s| s.name.as_deref() == Some("greet"))
.unwrap();
let shell_config: wfe_yaml::executors::shell::ShellConfig =
serde_json::from_value(step.step_config.clone().unwrap()).unwrap();
assert_eq!(shell_config.run, "echo hello world");
}
#[test]
fn load_workflow_from_str_with_unresolved_variable_returns_error() {
let yaml = r#"
workflow:
id: unresolved-wf
version: 1
steps:
- name: step1
type: shell
config:
run: echo ((missing))
"#;
let result = load_workflow_from_str(yaml, &HashMap::new());
assert!(result.is_err());
let err = match result { Err(e) => e.to_string(), Ok(_) => panic!("expected error") };
assert!(
err.contains("missing"),
"Expected unresolved variable error, got: {err}"
);
}

238
wfe-yaml/tests/shell.rs Normal file
View File

@@ -0,0 +1,238 @@
use std::collections::HashMap;
use std::sync::Arc;
use std::time::Duration;
use wfe::models::WorkflowStatus;
use wfe::{WorkflowHostBuilder, run_workflow_sync};
use wfe_core::test_support::{
InMemoryLockProvider, InMemoryPersistenceProvider, InMemoryQueueProvider,
};
use wfe_yaml::load_workflow_from_str;
async fn run_yaml_workflow_with_data(
yaml: &str,
data: serde_json::Value,
) -> wfe::models::WorkflowInstance {
let config = HashMap::new();
let compiled = load_workflow_from_str(yaml, &config).unwrap();
let persistence = Arc::new(InMemoryPersistenceProvider::new());
let lock = Arc::new(InMemoryLockProvider::new());
let queue = Arc::new(InMemoryQueueProvider::new());
let host = WorkflowHostBuilder::new()
.use_persistence(persistence as Arc<dyn wfe_core::traits::PersistenceProvider>)
.use_lock_provider(lock as Arc<dyn wfe_core::traits::DistributedLockProvider>)
.use_queue_provider(queue as Arc<dyn wfe_core::traits::QueueProvider>)
.build()
.unwrap();
for (key, factory) in compiled.step_factories {
host.register_step_factory(&key, factory).await;
}
host.register_workflow_definition(compiled.definition.clone())
.await;
host.start().await.unwrap();
let instance = run_workflow_sync(
&host,
&compiled.definition.id,
compiled.definition.version,
data,
Duration::from_secs(10),
)
.await
.unwrap();
host.stop().await;
instance
}
async fn run_yaml_workflow(yaml: &str) -> wfe::models::WorkflowInstance {
run_yaml_workflow_with_data(yaml, serde_json::json!({})).await
}
#[tokio::test]
async fn simple_echo_captures_stdout() {
let yaml = r#"
workflow:
id: echo-capture-wf
version: 1
steps:
- name: echo-step
type: shell
config:
run: echo "hello world"
"#;
let instance = run_yaml_workflow(yaml).await;
assert_eq!(instance.status, WorkflowStatus::Complete);
// stdout should be captured in workflow data.
if let Some(data) = instance.data.as_object() {
if let Some(stdout) = data.get("echo-step.stdout") {
assert!(
stdout.as_str().unwrap().contains("hello world"),
"stdout should contain 'hello world', got: {}",
stdout
);
}
}
}
#[tokio::test]
async fn wfe_output_parsing() {
let wfe_prefix = "##wfe";
let yaml = format!(
r#"
workflow:
id: output-parse-wf
version: 1
steps:
- name: output-step
type: shell
config:
run: |
echo "{wfe_prefix}[output greeting=hello]"
echo "{wfe_prefix}[output count=42]"
echo "{wfe_prefix}[output path=/usr/local/bin]"
"#
);
let instance = run_yaml_workflow(&yaml).await;
assert_eq!(instance.status, WorkflowStatus::Complete);
if let Some(data) = instance.data.as_object() {
if let Some(greeting) = data.get("greeting") {
assert_eq!(greeting.as_str(), Some("hello"));
}
if let Some(count) = data.get("count") {
assert_eq!(count.as_str(), Some("42"));
}
if let Some(path) = data.get("path") {
assert_eq!(path.as_str(), Some("/usr/local/bin"));
}
}
}
#[tokio::test]
async fn nonzero_exit_code_causes_failure() {
let yaml = r#"
workflow:
id: fail-wf
version: 1
error_behavior:
type: terminate
steps:
- name: fail-step
type: shell
config:
run: exit 1
"#;
let instance = run_yaml_workflow(yaml).await;
assert_eq!(
instance.status,
WorkflowStatus::Terminated,
"Workflow should terminate on non-zero exit code"
);
}
#[tokio::test]
async fn env_vars_from_config_injected() {
let wfe_prefix = "##wfe";
let yaml = format!(
r#"
workflow:
id: env-wf
version: 1
steps:
- name: env-step
type: shell
config:
run: echo "{wfe_prefix}[output my_var=$MY_VAR]"
env:
MY_VAR: custom_value
"#
);
let instance = run_yaml_workflow(&yaml).await;
assert_eq!(instance.status, WorkflowStatus::Complete);
if let Some(data) = instance.data.as_object() {
if let Some(my_var) = data.get("my_var") {
assert_eq!(my_var.as_str(), Some("custom_value"));
}
}
}
#[tokio::test]
async fn workflow_data_injected_as_env_vars() {
let wfe_prefix = "##wfe";
let yaml = format!(
r#"
workflow:
id: data-env-wf
version: 1
steps:
- name: data-step
type: shell
config:
run: echo "{wfe_prefix}[output result=$GREETING]"
"#
);
let instance = run_yaml_workflow_with_data(
&yaml,
serde_json::json!({"greeting": "hi there"}),
)
.await;
assert_eq!(instance.status, WorkflowStatus::Complete);
if let Some(data) = instance.data.as_object() {
if let Some(result) = data.get("result") {
assert_eq!(result.as_str(), Some("hi there"));
}
}
}
#[tokio::test]
async fn working_dir_is_respected() {
let yaml = r#"
workflow:
id: workdir-wf
version: 1
steps:
- name: pwd-step
type: shell
config:
run: pwd
working_dir: /tmp
"#;
let instance = run_yaml_workflow(yaml).await;
assert_eq!(instance.status, WorkflowStatus::Complete);
if let Some(data) = instance.data.as_object() {
if let Some(stdout) = data.get("pwd-step.stdout") {
let output = stdout.as_str().unwrap().trim();
// On macOS, /tmp -> /private/tmp
assert!(
output == "/tmp" || output == "/private/tmp",
"Expected /tmp or /private/tmp, got: {output}"
);
}
}
}
#[tokio::test]
async fn shell_step_with_bash() {
let yaml = r#"
workflow:
id: bash-wf
version: 1
steps:
- name: bash-step
type: shell
config:
run: echo "using bash"
shell: bash
"#;
let instance = run_yaml_workflow(yaml).await;
assert_eq!(instance.status, WorkflowStatus::Complete);
}

View File

@@ -0,0 +1,328 @@
use std::collections::HashMap;
use wfe_yaml::load_workflow_from_str;
#[test]
fn empty_steps_returns_validation_error() {
let yaml = r#"
workflow:
id: empty-wf
version: 1
steps: []
"#;
let result = load_workflow_from_str(yaml, &HashMap::new());
assert!(result.is_err());
let err = match result { Err(e) => e.to_string(), Ok(_) => panic!("expected error") };
assert!(
err.contains("at least one step"),
"Expected 'at least one step' error, got: {err}"
);
}
#[test]
fn step_with_no_type_and_no_parallel_returns_error() {
let yaml = r#"
workflow:
id: no-type-wf
version: 1
steps:
- name: bad-step
"#;
let result = load_workflow_from_str(yaml, &HashMap::new());
assert!(result.is_err());
let err = match result { Err(e) => e.to_string(), Ok(_) => panic!("expected error") };
assert!(
err.contains("type") && err.contains("parallel"),
"Expected error about missing type or parallel, got: {err}"
);
}
#[test]
fn step_with_both_type_and_parallel_returns_error() {
let yaml = r#"
workflow:
id: both-wf
version: 1
steps:
- name: bad-step
type: shell
parallel:
- name: child
type: shell
config:
run: echo hi
"#;
let result = load_workflow_from_str(yaml, &HashMap::new());
assert!(result.is_err());
let err = match result { Err(e) => e.to_string(), Ok(_) => panic!("expected error") };
assert!(
err.contains("cannot have both"),
"Expected 'cannot have both' error, got: {err}"
);
}
#[test]
fn duplicate_step_names_returns_error() {
let yaml = r#"
workflow:
id: dup-wf
version: 1
steps:
- name: deploy
type: shell
config:
run: echo a
- name: deploy
type: shell
config:
run: echo b
"#;
let result = load_workflow_from_str(yaml, &HashMap::new());
assert!(result.is_err());
let err = match result { Err(e) => e.to_string(), Ok(_) => panic!("expected error") };
assert!(
err.contains("Duplicate step name") && err.contains("deploy"),
"Expected duplicate name error, got: {err}"
);
}
#[test]
fn shell_step_missing_run_and_file_returns_error() {
let yaml = r#"
workflow:
id: no-run-wf
version: 1
steps:
- name: bad-shell
type: shell
config:
shell: bash
"#;
let result = load_workflow_from_str(yaml, &HashMap::new());
assert!(result.is_err());
let err = match result { Err(e) => e.to_string(), Ok(_) => panic!("expected error") };
assert!(
err.contains("config.run") || err.contains("config.file"),
"Expected error about missing run/file, got: {err}"
);
}
#[test]
fn shell_step_missing_config_section_returns_error() {
let yaml = r#"
workflow:
id: no-config-wf
version: 1
steps:
- name: bad-shell
type: shell
"#;
let result = load_workflow_from_str(yaml, &HashMap::new());
assert!(result.is_err());
let err = match result { Err(e) => e.to_string(), Ok(_) => panic!("expected error") };
assert!(
err.contains("config"),
"Expected error about missing config, got: {err}"
);
}
#[test]
fn invalid_error_behavior_type_returns_error() {
let yaml = r#"
workflow:
id: bad-eb-wf
version: 1
error_behavior:
type: panic
steps:
- name: step1
type: shell
config:
run: echo hi
"#;
let result = load_workflow_from_str(yaml, &HashMap::new());
assert!(result.is_err());
let err = match result { Err(e) => e.to_string(), Ok(_) => panic!("expected error") };
assert!(
err.contains("panic"),
"Expected error mentioning invalid type, got: {err}"
);
}
#[test]
fn invalid_step_level_error_behavior_returns_error() {
let yaml = r#"
workflow:
id: bad-step-eb-wf
version: 1
steps:
- name: step1
type: shell
config:
run: echo hi
error_behavior:
type: crash
"#;
let result = load_workflow_from_str(yaml, &HashMap::new());
assert!(result.is_err());
let err = match result { Err(e) => e.to_string(), Ok(_) => panic!("expected error") };
assert!(
err.contains("crash"),
"Expected error mentioning invalid type, got: {err}"
);
}
#[test]
fn valid_minimal_step_passes_validation() {
let yaml = r#"
workflow:
id: valid-wf
version: 1
steps:
- name: hello
type: shell
config:
run: echo hello
"#;
let result = load_workflow_from_str(yaml, &HashMap::new());
assert!(result.is_ok(), "Valid workflow should pass, got: {:?}", result.err());
}
#[test]
fn valid_parallel_step_passes_validation() {
let yaml = r#"
workflow:
id: valid-parallel-wf
version: 1
steps:
- name: parallel-group
parallel:
- name: task-a
type: shell
config:
run: echo a
"#;
let result = load_workflow_from_str(yaml, &HashMap::new());
assert!(result.is_ok(), "Valid parallel workflow should pass, got: {:?}", result.err());
}
#[test]
fn hook_steps_are_also_validated_for_duplicates() {
let yaml = r#"
workflow:
id: hook-dup-wf
version: 1
steps:
- name: deploy
type: shell
config:
run: deploy.sh
on_failure:
name: deploy
type: shell
config:
run: rollback.sh
"#;
let result = load_workflow_from_str(yaml, &HashMap::new());
assert!(result.is_err());
let err = match result { Err(e) => e.to_string(), Ok(_) => panic!("expected error") };
assert!(
err.contains("Duplicate step name"),
"Expected duplicate name error for hook, got: {err}"
);
}
#[test]
fn on_success_hook_validated() {
let yaml = r#"
workflow:
id: hook-val-wf
version: 1
steps:
- name: deploy
type: shell
config:
run: deploy.sh
on_success:
name: notify
type: shell
config:
run: echo ok
"#;
let result = load_workflow_from_str(yaml, &HashMap::new());
assert!(result.is_ok(), "Valid on_success hook should pass, got: {:?}", result.err());
}
#[test]
fn ensure_hook_validated() {
let yaml = r#"
workflow:
id: ensure-val-wf
version: 1
steps:
- name: deploy
type: shell
config:
run: deploy.sh
ensure:
name: cleanup
type: shell
config:
run: cleanup.sh
"#;
let result = load_workflow_from_str(yaml, &HashMap::new());
assert!(result.is_ok(), "Valid ensure hook should pass, got: {:?}", result.err());
}
#[test]
fn all_valid_error_behavior_types_pass() {
for eb_type in &["retry", "suspend", "terminate", "compensate"] {
let yaml = format!(
r#"
workflow:
id: eb-{eb_type}-wf
version: 1
error_behavior:
type: {eb_type}
steps:
- name: step1
type: shell
config:
run: echo hi
"#
);
let result = load_workflow_from_str(&yaml, &HashMap::new());
assert!(
result.is_ok(),
"Error behavior type '{eb_type}' should be valid, got: {:?}",
result.err()
);
}
}
#[test]
fn parallel_children_duplicate_names_detected() {
let yaml = r#"
workflow:
id: par-dup-wf
version: 1
steps:
- name: parallel-group
parallel:
- name: task-a
type: shell
config:
run: echo a
- name: task-a
type: shell
config:
run: echo b
"#;
let result = load_workflow_from_str(yaml, &HashMap::new());
assert!(result.is_err());
let err = match result { Err(e) => e.to_string(), Ok(_) => panic!("expected error") };
assert!(
err.contains("Duplicate step name") && err.contains("task-a"),
"Expected duplicate name in parallel children, got: {err}"
);
}