feat(wfe-yaml): add YAML workflow definitions with shell executor

Concourse-CI-inspired YAML format for defining workflows. Compiles
to standard WorkflowDefinition + step factories.

Features:
- Schema parsing with serde_yaml (YamlWorkflow, YamlStep, StepConfig)
- ((var.path)) interpolation from config maps at load time
- YAML anchors (&anchor/*alias) fully supported
- Validation at load time (no runtime surprises)
- Shell executor: runs commands via tokio::process, captures stdout,
  parses ##wfe[output name=value] annotations for structured outputs
- Compiler: sequential wiring, parallel blocks, on_failure/on_success/
  ensure hooks, error behavior mapping
- Public API: load_workflow(), load_workflow_from_str()
- 23 tests (schema, interpolation, compiler, e2e)
This commit is contained in:
2026-03-25 21:32:00 +00:00
parent 8d0f83da3c
commit b89563af63
14 changed files with 1377 additions and 1 deletions

225
wfe-yaml/tests/compiler.rs Normal file
View File

@@ -0,0 +1,225 @@
use std::collections::HashMap;
use std::time::Duration;
use wfe_core::models::error_behavior::ErrorBehavior;
use wfe_yaml::load_workflow_from_str;
#[test]
fn single_step_produces_one_workflow_step() {
    // Compiling a one-step workflow must yield exactly one main step, with id 0.
    let yaml = r#"
workflow:
  id: single
  version: 1
  steps:
    - name: hello
      type: shell
      config:
        run: echo hello
"#;
    let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap();
    let hello_steps: Vec<_> = compiled
        .definition
        .steps
        .iter()
        .filter(|step| step.name.as_deref() == Some("hello"))
        .collect();
    assert_eq!(hello_steps.len(), 1);
    assert_eq!(hello_steps[0].id, 0);
}
#[test]
fn two_sequential_steps_wired_correctly() {
    // Two steps listed in sequence: the first step's single outcome must
    // point at the second step's id.
    let yaml = r#"
workflow:
  id: sequential
  version: 1
  steps:
    - name: step-a
      type: shell
      config:
        run: echo a
    - name: step-b
      type: shell
      config:
        run: echo b
"#;
    let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap();
    // Small lookup helper to keep the assertions readable.
    let by_name = |wanted: &str| {
        compiled
            .definition
            .steps
            .iter()
            .find(|step| step.name.as_deref() == Some(wanted))
            .unwrap()
    };
    let first = by_name("step-a");
    let second = by_name("step-b");
    assert_eq!(first.outcomes.len(), 1);
    assert_eq!(first.outcomes[0].next_step, second.id);
}
#[test]
fn parallel_block_produces_container_with_children() {
    // A `parallel:` entry compiles into a container step holding one child
    // per branch; the compiler models the container as a SequenceStep.
    let yaml = r#"
workflow:
  id: parallel-wf
  version: 1
  steps:
    - name: parallel-group
      parallel:
        - name: task-a
          type: shell
          config:
            run: echo a
        - name: task-b
          type: shell
          config:
            run: echo b
"#;
    let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap();
    let group = compiled
        .definition
        .steps
        .iter()
        .find(|step| step.name.as_deref() == Some("parallel-group"))
        .unwrap();
    assert!(
        group.step_type.contains("SequenceStep"),
        "Container should be a SequenceStep, got: {}",
        group.step_type
    );
    assert_eq!(group.children.len(), 2);
}
#[test]
fn on_failure_creates_compensation_step() {
    // An `on_failure:` hook compiles into a compensation step, and the
    // owning step switches to ErrorBehavior::Compensate pointing at it.
    let yaml = r#"
workflow:
  id: compensation-wf
  version: 1
  steps:
    - name: deploy
      type: shell
      config:
        run: deploy.sh
      on_failure:
        name: rollback
        type: shell
        config:
          run: rollback.sh
"#;
    let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap();
    let by_name = |wanted: &str| {
        compiled
            .definition
            .steps
            .iter()
            .find(|step| step.name.as_deref() == Some(wanted))
            .unwrap()
    };
    let deploy = by_name("deploy");
    assert!(deploy.compensation_step_id.is_some());
    assert_eq!(deploy.error_behavior, Some(ErrorBehavior::Compensate));
    // The compensation pointer must reference the compiled rollback step.
    assert_eq!(deploy.compensation_step_id, Some(by_name("rollback").id));
}
#[test]
fn error_behavior_maps_correctly() {
    // Workflow-level `retry` (with interval/max_retries) becomes the
    // definition's default; a step-level `suspend` overrides it on that step.
    let yaml = r#"
workflow:
  id: retry-wf
  version: 1
  error_behavior:
    type: retry
    interval: 5s
    max_retries: 10
  steps:
    - name: step1
      type: shell
      config:
        run: echo hi
      error_behavior:
        type: suspend
"#;
    let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap();
    let expected_default = ErrorBehavior::Retry {
        interval: Duration::from_secs(5),
        max_retries: 10,
    };
    assert_eq!(compiled.definition.default_error_behavior, expected_default);
    let step1 = compiled
        .definition
        .steps
        .iter()
        .find(|step| step.name.as_deref() == Some("step1"))
        .unwrap();
    assert_eq!(step1.error_behavior, Some(ErrorBehavior::Suspend));
}
#[test]
fn anchors_compile_correctly() {
    // A config block anchored with &default_config and reused via *alias
    // must survive compilation: both steps carry a step_config, wiring is
    // sequential, and the aliased step deserializes to the same ShellConfig.
    let yaml = r#"
workflow:
  id: anchor-wf
  version: 1
  steps:
    - name: build
      type: shell
      config: &default_config
        shell: bash
        timeout: 5m
        run: cargo build
    - name: test
      type: shell
      config: *default_config
"#;
    let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap();
    let by_name = |wanted: &str| {
        compiled
            .definition
            .steps
            .iter()
            .find(|step| step.name.as_deref() == Some(wanted))
            .unwrap()
    };
    let build = by_name("build");
    let test = by_name("test");
    // Both steps must carry their own config payload.
    assert!(build.step_config.is_some());
    assert!(test.step_config.is_some());
    // Sequential wiring: build -> test.
    assert_eq!(build.outcomes.len(), 1);
    assert_eq!(build.outcomes[0].next_step, test.id);
    // The aliased config round-trips through the compiler intact.
    let cfg: wfe_yaml::executors::shell::ShellConfig =
        serde_json::from_value(test.step_config.clone().unwrap()).unwrap();
    assert_eq!(cfg.run, "cargo build");
    assert_eq!(cfg.shell, "bash", "shell should be inherited from YAML anchor alias");
}

125
wfe-yaml/tests/e2e_yaml.rs Normal file
View File

@@ -0,0 +1,125 @@
use std::collections::HashMap;
use std::sync::Arc;
use std::time::Duration;
use wfe::models::WorkflowStatus;
use wfe::{WorkflowHostBuilder, run_workflow_sync};
use wfe_core::test_support::{
InMemoryLockProvider, InMemoryPersistenceProvider, InMemoryQueueProvider,
};
use wfe_yaml::load_workflow_from_str;
/// Compile `yaml` into a workflow, run it to completion on an all-in-memory
/// host (10s budget), and return the finished instance for assertions.
async fn run_yaml_workflow(yaml: &str) -> wfe::models::WorkflowInstance {
    let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap();

    // In-memory providers: no external services needed for these tests.
    let persistence: Arc<dyn wfe_core::traits::PersistenceProvider> =
        Arc::new(InMemoryPersistenceProvider::new());
    let lock: Arc<dyn wfe_core::traits::DistributedLockProvider> =
        Arc::new(InMemoryLockProvider::new());
    let queue: Arc<dyn wfe_core::traits::QueueProvider> =
        Arc::new(InMemoryQueueProvider::new());

    let host = WorkflowHostBuilder::new()
        .use_persistence(persistence)
        .use_lock_provider(lock)
        .use_queue_provider(queue)
        .build()
        .unwrap();

    // Register the compiled step factories, then the definition itself.
    for (key, factory) in compiled.step_factories {
        host.register_step_factory(&key, factory).await;
    }
    host.register_workflow_definition(compiled.definition.clone())
        .await;
    host.start().await.unwrap();

    let finished = run_workflow_sync(
        &host,
        &compiled.definition.id,
        compiled.definition.version,
        serde_json::json!({}),
        Duration::from_secs(10),
    )
    .await
    .unwrap();
    host.stop().await;
    finished
}
#[tokio::test]
async fn simple_echo_workflow_runs_to_completion() {
    // Smallest end-to-end run: a single shell step must finish Complete.
    let instance = run_yaml_workflow(
        r#"
workflow:
  id: echo-wf
  version: 1
  steps:
    - name: echo-step
      type: shell
      config:
        run: echo "hello from wfe-yaml"
"#,
    )
    .await;
    assert_eq!(instance.status, WorkflowStatus::Complete);
}
#[tokio::test]
async fn workflow_with_output_capture() {
    // The shell executor parses `##wfe[output name=value]` annotations on
    // stdout and merges them into the workflow data. This test emits two such
    // annotations and REQUIRES both outputs to be present afterwards.
    //
    // Fix: the previous version only asserted values inside
    // `if let Some(...)` blocks, so a completely broken capture path (no keys
    // merged at all) would have passed silently.
    let wfe_prefix = "##wfe";
    let yaml = format!(
        r#"
workflow:
  id: output-wf
  version: 1
  steps:
    - name: capture
      type: shell
      config:
        run: |
          echo "{wfe_prefix}[output greeting=hello]"
          echo "{wfe_prefix}[output count=42]"
"#
    );
    let instance = run_yaml_workflow(&yaml).await;
    assert_eq!(instance.status, WorkflowStatus::Complete);
    // output_data gets merged into workflow.data by the executor; both
    // captured outputs must be present with the exact emitted values.
    let data = instance
        .data
        .as_object()
        .expect("workflow data should be a JSON object");
    assert_eq!(
        data.get("greeting").and_then(|v| v.as_str()),
        Some("hello"),
        "shell output 'greeting' was not captured into workflow data"
    );
    assert_eq!(
        data.get("count").and_then(|v| v.as_str()),
        Some("42"),
        "shell output 'count' was not captured into workflow data"
    );
}
#[tokio::test]
async fn two_sequential_steps_run_in_order() {
    // A two-step sequence must complete, leaving one Complete execution
    // pointer per step.
    let yaml = r#"
workflow:
  id: seq-wf
  version: 1
  steps:
    - name: step-one
      type: shell
      config:
        run: echo step-one
    - name: step-two
      type: shell
      config:
        run: echo step-two
"#;
    let instance = run_yaml_workflow(yaml).await;
    assert_eq!(instance.status, WorkflowStatus::Complete);
    let completed = instance
        .execution_pointers
        .iter()
        .filter(|ptr| ptr.status == wfe::models::PointerStatus::Complete)
        .count();
    assert_eq!(completed, 2, "Expected 2 completed execution pointers");
}

77
wfe-yaml/tests/interpolation.rs Normal file
View File

@@ -0,0 +1,77 @@
use std::collections::HashMap;
use wfe_yaml::interpolation::interpolate;
#[test]
fn simple_var_replacement() {
    // A ((name)) marker resolves against a top-level config key.
    let config = HashMap::from([("name".to_string(), serde_json::json!("world"))]);
    assert_eq!(interpolate("hello ((name))", &config).unwrap(), "hello world");
}
#[test]
fn nested_path_replacement() {
    // Dotted paths walk into nested JSON objects; the numeric leaf is
    // rendered as its string form.
    let mut config = HashMap::new();
    config.insert(
        "config".to_string(),
        serde_json::json!({ "database": { "host": "localhost", "port": 5432 } }),
    );
    let host_line = interpolate("host: ((config.database.host))", &config).unwrap();
    assert_eq!(host_line, "host: localhost");
    let port_line = interpolate("port: ((config.database.port))", &config).unwrap();
    assert_eq!(port_line, "port: 5432");
}
#[test]
fn unresolved_var_returns_error() {
    // A variable with no config entry fails, and the error names the variable.
    let config = HashMap::new();
    let err = interpolate("hello ((missing_var))", &config).unwrap_err();
    assert!(err.to_string().contains("missing_var"));
}
#[test]
fn no_vars_passes_through_unchanged() {
    // Text without any ((...)) markers round-trips untouched.
    let text = "no variables here";
    assert_eq!(interpolate(text, &HashMap::new()).unwrap(), text);
}
#[test]
fn multiple_vars_in_one_string() {
    // Every marker in the string is substituted, not just the first.
    let config = HashMap::from([
        ("first".to_string(), serde_json::json!("hello")),
        ("second".to_string(), serde_json::json!("world")),
    ]);
    let rendered = interpolate("((first)) ((second))!", &config).unwrap();
    assert_eq!(rendered, "hello world!");
}
#[test]
fn interpolation_does_not_break_yaml_anchors() {
    // Only ((var)) markers are rewritten; YAML &anchor / *alias syntax must
    // pass through interpolation byte-for-byte.
    let config = HashMap::from([("version".to_string(), serde_json::json!("1.0"))]);
    let yaml = r#"
default: &default
  version: ((version))
merged:
  <<: *default
"#;
    let rendered = interpolate(yaml, &config).unwrap();
    for needle in ["version: 1.0", "&default", "*default"] {
        assert!(rendered.contains(needle));
    }
}

194
wfe-yaml/tests/schema.rs Normal file
View File

@@ -0,0 +1,194 @@
use wfe_yaml::schema::YamlWorkflow;
#[test]
fn parse_minimal_yaml() {
    // The smallest valid document: one workflow with one shell step.
    let yaml = r#"
workflow:
  id: minimal
  version: 1
  steps:
    - name: hello
      type: shell
      config:
        run: echo hello
"#;
    let doc: YamlWorkflow = serde_yaml::from_str(yaml).unwrap();
    let wf = &doc.workflow;
    assert_eq!(wf.id, "minimal");
    assert_eq!(wf.version, 1);
    assert_eq!(wf.steps.len(), 1);
    let step = &wf.steps[0];
    assert_eq!(step.name, "hello");
    assert_eq!(step.step_type.as_deref(), Some("shell"));
}
#[test]
fn parse_with_parallel_block() {
    // A step with `parallel:` carries its branches as nested steps.
    let yaml = r#"
workflow:
  id: parallel-wf
  version: 1
  steps:
    - name: parallel-group
      parallel:
        - name: task-a
          type: shell
          config:
            run: echo a
        - name: task-b
          type: shell
          config:
            run: echo b
"#;
    let doc: YamlWorkflow = serde_yaml::from_str(yaml).unwrap();
    let branches = doc.workflow.steps[0].parallel.as_ref().unwrap();
    assert_eq!(branches.len(), 2);
    assert_eq!(branches[0].name, "task-a");
    assert_eq!(branches[1].name, "task-b");
}
#[test]
fn parse_with_hooks() {
    // Both `on_failure:` and `ensure:` hooks parse into nested steps.
    let yaml = r#"
workflow:
  id: hooks-wf
  version: 1
  steps:
    - name: deploy
      type: shell
      config:
        run: deploy.sh
      on_failure:
        name: rollback
        type: shell
        config:
          run: rollback.sh
      ensure:
        name: cleanup
        type: shell
        config:
          run: cleanup.sh
"#;
    let doc: YamlWorkflow = serde_yaml::from_str(yaml).unwrap();
    let deploy = &doc.workflow.steps[0];
    let failure_hook = deploy.on_failure.as_ref().unwrap();
    assert_eq!(failure_hook.name, "rollback");
    let ensure_hook = deploy.ensure.as_ref().unwrap();
    assert_eq!(ensure_hook.name, "cleanup");
}
#[test]
fn parse_with_error_behavior() {
    // error_behavior parses at both levels: workflow (retry with params)
    // and step (terminate).
    let yaml = r#"
workflow:
  id: retry-wf
  version: 1
  error_behavior:
    type: retry
    interval: 5s
    max_retries: 5
  steps:
    - name: flaky
      type: shell
      config:
        run: flaky-task.sh
      error_behavior:
        type: terminate
"#;
    let doc: YamlWorkflow = serde_yaml::from_str(yaml).unwrap();
    let workflow_eb = doc.workflow.error_behavior.as_ref().unwrap();
    assert_eq!(workflow_eb.behavior_type, "retry");
    assert_eq!(workflow_eb.interval.as_deref(), Some("5s"));
    assert_eq!(workflow_eb.max_retries, Some(5));
    let flaky_eb = doc.workflow.steps[0].error_behavior.as_ref().unwrap();
    assert_eq!(flaky_eb.behavior_type, "terminate");
}
#[test]
fn invalid_yaml_returns_error() {
    // Malformed input (unclosed flow sequence) must surface as a parse error.
    let attempt: Result<YamlWorkflow, _> = serde_yaml::from_str("this is not valid yaml: [");
    assert!(attempt.is_err());
}
#[test]
fn parse_with_yaml_anchors_and_aliases() {
    // `config: *default_config` reuses the whole anchored block, so both
    // steps must parse to identical config values.
    let yaml = r#"
workflow:
  id: test-anchors
  version: 1
  steps:
    - name: build
      type: shell
      config: &default_config
        shell: bash
        timeout: 5m
        run: cargo build
    - name: test
      type: shell
      config: *default_config
"#;
    let doc: YamlWorkflow = serde_yaml::from_str(yaml).unwrap();
    assert_eq!(doc.workflow.steps.len(), 2);
    // The anchored original and the aliased copy carry the same fields.
    for step in &doc.workflow.steps {
        let cfg = step.config.as_ref().unwrap();
        assert_eq!(cfg.shell.as_deref(), Some("bash"));
        assert_eq!(cfg.timeout.as_deref(), Some("5m"));
        assert_eq!(cfg.run.as_deref(), Some("cargo build"));
    }
}
#[test]
fn parse_with_scalar_anchors() {
    // Anchors on scalar values (&step_type shell / *step_type) resolve
    // during parsing, so both steps see "shell".
    let yaml = r#"
workflow:
  id: scalar-anchors
  version: 1
  steps:
    - name: step1
      type: &step_type shell
      config:
        run: echo hi
    - name: step2
      type: *step_type
      config:
        run: echo bye
"#;
    let doc: YamlWorkflow = serde_yaml::from_str(yaml).unwrap();
    for step in &doc.workflow.steps {
        assert_eq!(step.step_type.as_deref(), Some("shell"));
    }
}
#[test]
fn parse_with_extra_keys_for_templates() {
    // Unknown keys such as `_templates` (handy for holding YAML anchors)
    // must not break deserialization of the rest of the document.
    let yaml = r#"
workflow:
  id: template-wf
  version: 1
  _templates:
    default_shell: bash
  steps:
    - name: step1
      type: shell
      config:
        run: echo hi
"#;
    let doc: YamlWorkflow = serde_yaml::from_str(yaml).unwrap();
    assert_eq!(doc.workflow.id, "template-wf");
    assert_eq!(doc.workflow.steps.len(), 1);
}