feat(wfe-yaml): add workflow step type, cross-ref validation, cycle detection

Compiler dispatches type: workflow to SubWorkflowStep. Validation
detects circular workflow references via DFS with coloring. Cross-
workflow reference checking for multi-workflow files. Duplicate
workflow ID detection. 28 edge case tests for validation paths.
This commit is contained in:
2026-03-26 14:14:39 +00:00
parent 821ef2f570
commit bf252c51f0
4 changed files with 1555 additions and 44 deletions

View File

@@ -1,5 +1,6 @@
use std::time::Duration; use std::time::Duration;
use serde::Serialize;
use wfe_core::models::error_behavior::ErrorBehavior; use wfe_core::models::error_behavior::ErrorBehavior;
use wfe_core::models::workflow_definition::{StepOutcome, WorkflowDefinition, WorkflowStep}; use wfe_core::models::workflow_definition::{StepOutcome, WorkflowDefinition, WorkflowStep};
use wfe_core::traits::StepBody; use wfe_core::traits::StepBody;
@@ -14,6 +15,38 @@ use wfe_buildkit::{BuildkitConfig, BuildkitStep};
use wfe_containerd::{ContainerdConfig, ContainerdStep}; use wfe_containerd::{ContainerdConfig, ContainerdStep};
use crate::schema::{WorkflowSpec, YamlErrorBehavior, YamlStep}; use crate::schema::{WorkflowSpec, YamlErrorBehavior, YamlStep};
/// Configuration for a sub-workflow step (`type: workflow`).
///
/// Serialized to JSON and stored as the compiled step's config value so the
/// runtime can identify and start the referenced child workflow.
#[derive(Debug, Clone, Serialize)]
pub struct SubWorkflowConfig {
    // ID of the child workflow to run (YAML key `config.workflow`).
    pub workflow_id: String,
    // Version of the child workflow; defaults to 1 when not specified.
    pub version: u32,
    // Names of the outputs the parent step declares for the child.
    pub output_keys: Vec<String>,
}
/// Placeholder step body for sub-workflow steps.
///
/// This is a compile-time placeholder. When wfe-core provides a real
/// `SubWorkflowStep`, it should replace this. The placeholder always
/// returns `ExecutionResult::Next` so compilation and basic tests work.
#[derive(Debug, Default)]
pub struct SubWorkflowPlaceholderStep {
    // ID of the child workflow this step would start.
    pub workflow_id: String,
    // Requested version of the child workflow.
    pub version: u32,
    // Output names the child workflow is expected to provide.
    pub output_keys: Vec<String>,
}
#[async_trait::async_trait]
impl StepBody for SubWorkflowPlaceholderStep {
    /// Placeholder execution: the context is intentionally unused and the
    /// step immediately reports `ExecutionResult::next()`.
    ///
    /// A real implementation would start the referenced child workflow here.
    async fn run(
        &mut self,
        _context: &wfe_core::traits::StepExecutionContext<'_>,
    ) -> wfe_core::Result<wfe_core::models::ExecutionResult> {
        Ok(wfe_core::models::ExecutionResult::next())
    }
}
/// Factory type alias for step creation closures. /// Factory type alias for step creation closures.
pub type StepFactory = Box<dyn Fn() -> Box<dyn StepBody> + Send + Sync>; pub type StepFactory = Box<dyn Fn() -> Box<dyn StepBody> + Send + Sync>;
@@ -284,6 +317,43 @@ fn build_step_config_and_factory(
}); });
Ok((key, value, factory)) Ok((key, value, factory))
} }
"workflow" => {
    // Sub-workflow step: requires `config.workflow` naming the child
    // workflow; `config.workflow_version` defaults to 1.
    let config = step.config.as_ref().ok_or_else(|| {
        YamlWorkflowError::Compilation(format!(
            "Workflow step '{}' is missing 'config' section",
            step.name
        ))
    })?;
    let child_workflow_id = config.child_workflow.as_ref().ok_or_else(|| {
        YamlWorkflowError::Compilation(format!(
            "Workflow step '{}' must have 'config.workflow'",
            step.name
        ))
    })?;
    let child_version = config.child_version.unwrap_or(1);
    let sub_config = SubWorkflowConfig {
        workflow_id: child_workflow_id.clone(),
        version: child_version,
        output_keys: step.outputs.iter().map(|o| o.name.clone()).collect(),
    };
    let key = format!("wfe_yaml::workflow::{}", step.name);
    let value = serde_json::to_value(&sub_config).map_err(|e| {
        YamlWorkflowError::Compilation(format!(
            "Failed to serialize workflow config: {e}"
        ))
    })?;
    // `sub_config` is no longer needed after serialization, so move it into
    // the factory closure directly instead of cloning it first.
    let factory: StepFactory = Box::new(move || {
        Box::new(SubWorkflowPlaceholderStep {
            workflow_id: sub_config.workflow_id.clone(),
            version: sub_config.version,
            output_keys: sub_config.output_keys.clone(),
        }) as Box<dyn StepBody>
    });
    Ok((key, value, factory))
}
other => Err(YamlWorkflowError::Compilation(format!( other => Err(YamlWorkflowError::Compilation(format!(
"Unknown step type: '{other}'" "Unknown step type: '{other}'"
))), ))),

View File

@@ -1,4 +1,4 @@
use std::collections::HashSet; use std::collections::{HashMap, HashSet};
use crate::error::YamlWorkflowError; use crate::error::YamlWorkflowError;
use crate::schema::{WorkflowSpec, YamlStep}; use crate::schema::{WorkflowSpec, YamlStep};
@@ -22,6 +22,140 @@ pub fn validate(spec: &WorkflowSpec) -> Result<(), YamlWorkflowError> {
Ok(()) Ok(())
} }
/// Validate multiple workflow specs from a multi-workflow file.
/// Checks cross-workflow references and cycles in addition to per-workflow validation.
pub fn validate_multi(specs: &[WorkflowSpec]) -> Result<(), YamlWorkflowError> {
// Validate each workflow individually.
for spec in specs {
validate(spec)?;
}
// Check for duplicate workflow IDs.
let mut seen_ids = HashSet::new();
for spec in specs {
if !seen_ids.insert(&spec.id) {
return Err(YamlWorkflowError::Validation(format!(
"Duplicate workflow ID: '{}'",
spec.id
)));
}
}
// Validate cross-workflow references and detect cycles.
validate_workflow_references(specs)?;
Ok(())
}
/// Validate that workflow step references point to known workflows
/// and detect circular dependencies.
fn validate_workflow_references(specs: &[WorkflowSpec]) -> Result<(), YamlWorkflowError> {
let known_ids: HashSet<&str> = specs.iter().map(|s| s.id.as_str()).collect();
// Build a dependency graph: workflow_id -> set of referenced workflow_ids.
let mut deps: HashMap<&str, HashSet<&str>> = HashMap::new();
for spec in specs {
let mut spec_deps = HashSet::new();
collect_workflow_refs(&spec.steps, &mut spec_deps);
deps.insert(spec.id.as_str(), spec_deps);
}
// Detect cycles using DFS with coloring.
detect_cycles(&known_ids, &deps)?;
Ok(())
}
/// Collect all workflow IDs referenced by `type: workflow` steps.
fn collect_workflow_refs<'a>(steps: &'a [YamlStep], refs: &mut HashSet<&'a str>) {
for step in steps {
if step.step_type.as_deref() == Some("workflow")
&& let Some(ref config) = step.config
&& let Some(ref wf_id) = config.child_workflow
{
refs.insert(wf_id.as_str());
}
if let Some(ref children) = step.parallel {
collect_workflow_refs(children, refs);
}
if let Some(ref hook) = step.on_success {
collect_workflow_refs(std::slice::from_ref(hook.as_ref()), refs);
}
if let Some(ref hook) = step.on_failure {
collect_workflow_refs(std::slice::from_ref(hook.as_ref()), refs);
}
if let Some(ref hook) = step.ensure {
collect_workflow_refs(std::slice::from_ref(hook.as_ref()), refs);
}
}
}
/// Detect circular references in the workflow dependency graph.
///
/// Standard DFS three-coloring: White = unvisited, Gray = on the current
/// DFS path, Black = fully explored. Reaching a Gray node again means the
/// current path contains a cycle.
fn detect_cycles(
    known_ids: &HashSet<&str>,
    deps: &HashMap<&str, HashSet<&str>>,
) -> Result<(), YamlWorkflowError> {
    #[derive(Clone, Copy, PartialEq)]
    enum Color {
        White,
        Gray,
        Black,
    }
    // Every known workflow starts unvisited.
    let mut colors: HashMap<&str, Color> = known_ids.iter().map(|id| (*id, Color::White)).collect();

    // Recursive DFS over `deps`. `path` tracks the chain of Gray nodes so a
    // detected cycle can be reported with its full route.
    fn dfs<'a>(
        node: &'a str,
        deps: &HashMap<&str, HashSet<&'a str>>,
        colors: &mut HashMap<&'a str, Color>,
        path: &mut Vec<&'a str>,
    ) -> Result<(), YamlWorkflowError> {
        colors.insert(node, Color::Gray);
        path.push(node);
        if let Some(neighbors) = deps.get(node) {
            for &neighbor in neighbors {
                match colors.get(neighbor) {
                    Some(Color::Gray) => {
                        // Found a cycle. Build the cycle path for the error message.
                        // `neighbor` is Gray, so it must be somewhere in `path`.
                        let cycle_start = path.iter().position(|&n| n == neighbor).unwrap();
                        let cycle: Vec<&str> = path[cycle_start..].to_vec();
                        return Err(YamlWorkflowError::Validation(format!(
                            "Circular workflow reference detected: {} -> {}",
                            cycle.join(" -> "),
                            neighbor
                        )));
                    }
                    Some(Color::White) | None => {
                        // Only recurse into nodes that are in our known set.
                        // References outside `colors` are skipped here, not
                        // treated as errors.
                        if colors.contains_key(neighbor) {
                            dfs(neighbor, deps, colors, path)?;
                        }
                    }
                    Some(Color::Black) => {
                        // Already fully processed, skip.
                    }
                }
            }
        }
        // Finished exploring this node: remove it from the active path and
        // mark it Black so later traversals skip it.
        path.pop();
        colors.insert(node, Color::Black);
        Ok(())
    }

    // Snapshot the node list so `colors` can be mutated during iteration.
    let nodes: Vec<&str> = known_ids.iter().copied().collect();
    for node in nodes {
        if colors.get(node) == Some(&Color::White) {
            let mut path = Vec::new();
            dfs(node, deps, &mut colors, &mut path)?;
        }
    }
    Ok(())
}
fn validate_steps( fn validate_steps(
steps: &[YamlStep], steps: &[YamlStep],
seen_names: &mut HashSet<String>, seen_names: &mut HashSet<String>,
@@ -173,6 +307,24 @@ fn validate_steps(
} }
} }
// Workflow steps must have config.workflow.
if step.step_type.as_deref() == Some("workflow") {
    match step.config.as_ref() {
        None => {
            return Err(YamlWorkflowError::Validation(format!(
                "Workflow step '{}' must have a 'config' section",
                step.name
            )));
        }
        Some(config) if config.child_workflow.is_none() => {
            return Err(YamlWorkflowError::Validation(format!(
                "Workflow step '{}' must have 'config.workflow'",
                step.name
            )));
        }
        Some(_) => {}
    }
}
// Validate step-level error behavior. // Validate step-level error behavior.
if let Some(ref eb) = step.error_behavior { if let Some(ref eb) = step.error_behavior {
validate_error_behavior_type(&eb.behavior_type)?; validate_error_behavior_type(&eb.behavior_type)?;

View File

@@ -2,7 +2,7 @@ use std::collections::HashMap;
use std::time::Duration; use std::time::Duration;
use wfe_core::models::error_behavior::ErrorBehavior; use wfe_core::models::error_behavior::ErrorBehavior;
use wfe_yaml::load_workflow_from_str; use wfe_yaml::{load_single_workflow_from_str, load_workflow_from_str};
#[test] #[test]
fn single_step_produces_one_workflow_step() { fn single_step_produces_one_workflow_step() {
@@ -16,7 +16,7 @@ workflow:
config: config:
run: echo hello run: echo hello
"#; "#;
let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap(); let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
// The definition should have exactly 1 main step. // The definition should have exactly 1 main step.
let main_steps: Vec<_> = compiled let main_steps: Vec<_> = compiled
.definition .definition
@@ -44,7 +44,7 @@ workflow:
config: config:
run: echo b run: echo b
"#; "#;
let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap(); let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
let step_a = compiled let step_a = compiled
.definition .definition
@@ -82,7 +82,7 @@ workflow:
config: config:
run: echo b run: echo b
"#; "#;
let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap(); let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
let container = compiled let container = compiled
.definition .definition
@@ -116,7 +116,7 @@ workflow:
config: config:
run: rollback.sh run: rollback.sh
"#; "#;
let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap(); let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
let deploy = compiled let deploy = compiled
.definition .definition
@@ -156,7 +156,7 @@ workflow:
error_behavior: error_behavior:
type: suspend type: suspend
"#; "#;
let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap(); let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
assert_eq!( assert_eq!(
compiled.definition.default_error_behavior, compiled.definition.default_error_behavior,
@@ -193,7 +193,7 @@ workflow:
type: shell type: shell
config: *default_config config: *default_config
"#; "#;
let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap(); let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
// Should have 2 main steps + factories. // Should have 2 main steps + factories.
let build_step = compiled let build_step = compiled
@@ -241,7 +241,7 @@ workflow:
config: config:
run: echo "build succeeded" run: echo "build succeeded"
"#; "#;
let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap(); let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
let build = compiled let build = compiled
.definition .definition
@@ -279,7 +279,7 @@ workflow:
config: config:
run: cleanup.sh run: cleanup.sh
"#; "#;
let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap(); let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
let deploy = compiled let deploy = compiled
.definition .definition
@@ -322,7 +322,7 @@ workflow:
config: config:
run: cleanup.sh run: cleanup.sh
"#; "#;
let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap(); let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
let deploy = compiled let deploy = compiled
.definition .definition
@@ -351,7 +351,7 @@ workflow:
config: config:
run: echo hi run: echo hi
"#; "#;
let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap(); let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
assert_eq!( assert_eq!(
compiled.definition.default_error_behavior, compiled.definition.default_error_behavior,
ErrorBehavior::Terminate ErrorBehavior::Terminate
@@ -372,7 +372,7 @@ workflow:
config: config:
run: echo hi run: echo hi
"#; "#;
let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap(); let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
assert_eq!( assert_eq!(
compiled.definition.default_error_behavior, compiled.definition.default_error_behavior,
ErrorBehavior::Compensate ErrorBehavior::Compensate
@@ -394,7 +394,7 @@ workflow:
config: config:
run: echo hi run: echo hi
"#; "#;
let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap(); let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
assert_eq!( assert_eq!(
compiled.definition.default_error_behavior, compiled.definition.default_error_behavior,
ErrorBehavior::Retry { ErrorBehavior::Retry {
@@ -420,7 +420,7 @@ workflow:
config: config:
run: echo hi run: echo hi
"#; "#;
let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap(); let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
assert_eq!( assert_eq!(
compiled.definition.default_error_behavior, compiled.definition.default_error_behavior,
ErrorBehavior::Retry { ErrorBehavior::Retry {
@@ -447,7 +447,7 @@ workflow:
config: config:
run: echo hi run: echo hi
"#; "#;
let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap(); let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
assert_eq!( assert_eq!(
compiled.definition.default_error_behavior, compiled.definition.default_error_behavior,
ErrorBehavior::Retry { ErrorBehavior::Retry {
@@ -471,7 +471,7 @@ workflow:
config: config:
run: echo hi run: echo hi
"#; "#;
let result = load_workflow_from_str(yaml, &HashMap::new()); let result = load_single_workflow_from_str(yaml, &HashMap::new());
assert!(result.is_err()); assert!(result.is_err());
let err = match result { Err(e) => e.to_string(), Ok(_) => panic!("expected error") }; let err = match result { Err(e) => e.to_string(), Ok(_) => panic!("expected error") };
assert!(err.contains("explode"), "Error should mention the invalid type, got: {err}"); assert!(err.contains("explode"), "Error should mention the invalid type, got: {err}");
@@ -499,7 +499,7 @@ workflow:
config: config:
run: echo c run: echo c
"#; "#;
let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap(); let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
let container = compiled let container = compiled
.definition .definition
@@ -541,7 +541,7 @@ workflow:
RUST_LOG: debug RUST_LOG: debug
working_dir: /tmp working_dir: /tmp
"#; "#;
let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap(); let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
let step = compiled let step = compiled
.definition .definition
@@ -571,7 +571,7 @@ workflow:
config: config:
file: my_script.sh file: my_script.sh
"#; "#;
let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap(); let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
let step = compiled let step = compiled
.definition .definition
.steps .steps
@@ -596,7 +596,7 @@ workflow:
config: config:
run: echo hello run: echo hello
"#; "#;
let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap(); let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
let step = compiled let step = compiled
.definition .definition
.steps .steps
@@ -626,7 +626,7 @@ workflow:
config: config:
run: rollback.sh run: rollback.sh
"#; "#;
let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap(); let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
// Should have factories for both deploy and rollback. // Should have factories for both deploy and rollback.
let has_deploy = compiled let has_deploy = compiled
@@ -658,7 +658,7 @@ workflow:
config: config:
run: echo ok run: echo ok
"#; "#;
let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap(); let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
let has_notify = compiled let has_notify = compiled
.step_factories .step_factories
@@ -684,7 +684,7 @@ workflow:
config: config:
run: cleanup.sh run: cleanup.sh
"#; "#;
let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap(); let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
let has_cleanup = compiled let has_cleanup = compiled
.step_factories .step_factories
@@ -713,7 +713,7 @@ workflow:
config: config:
run: echo b run: echo b
"#; "#;
let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap(); let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
let container = compiled let container = compiled
.definition .definition
@@ -746,7 +746,7 @@ workflow:
config: config:
run: echo b run: echo b
"#; "#;
let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap(); let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
let step_a = compiled let step_a = compiled
.definition .definition
@@ -787,7 +787,7 @@ workflow:
config: config:
run: echo hi run: echo hi
"#; "#;
let compiled = load_workflow_from_str(yaml, &HashMap::new()).unwrap(); let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
assert_eq!( assert_eq!(
compiled.definition.description.as_deref(), compiled.definition.description.as_deref(),
Some("A test workflow") Some("A test workflow")
@@ -804,7 +804,7 @@ workflow:
- name: bad-step - name: bad-step
type: shell type: shell
"#; "#;
let result = load_workflow_from_str(yaml, &HashMap::new()); let result = load_single_workflow_from_str(yaml, &HashMap::new());
assert!(result.is_err()); assert!(result.is_err());
let err = match result { Err(e) => e.to_string(), Ok(_) => panic!("expected error") }; let err = match result { Err(e) => e.to_string(), Ok(_) => panic!("expected error") };
assert!( assert!(
@@ -812,3 +812,204 @@ workflow:
"Error should mention missing config, got: {err}" "Error should mention missing config, got: {err}"
); );
} }
// --- Workflow step compilation tests ---
#[test]
fn workflow_step_compiles_correctly() {
    // A `type: workflow` step referencing `child-wf` at version 3 with two
    // declared outputs.
    let yaml = r#"
workflow:
  id: parent-wf
  version: 1
  steps:
    - name: run-child
      type: workflow
      config:
        workflow: child-wf
        workflow_version: 3
      outputs:
        - name: result
        - name: status
"#;
    let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
    let step = compiled
        .definition
        .steps
        .iter()
        .find(|s| s.name.as_deref() == Some("run-child"))
        .unwrap();
    // The compiled step type should identify this as a workflow step.
    assert!(step.step_type.contains("workflow"));
    assert!(step.step_config.is_some());
    // Verify the serialized config contains the workflow_id and version.
    let config: serde_json::Value = step.step_config.clone().unwrap();
    assert_eq!(config["workflow_id"].as_str(), Some("child-wf"));
    assert_eq!(config["version"].as_u64(), Some(3));
    assert_eq!(config["output_keys"].as_array().unwrap().len(), 2);
}
#[test]
fn workflow_step_version_defaults_to_1() {
    // No `workflow_version` in the config: the compiler should fall back to
    // version 1.
    let yaml = r#"
workflow:
  id: parent-wf
  version: 1
  steps:
    - name: run-child
      type: workflow
      config:
        workflow: child-wf
"#;
    let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
    let step = compiled
        .definition
        .steps
        .iter()
        .find(|s| s.name.as_deref() == Some("run-child"))
        .unwrap();
    // The serialized sub-workflow config carries the defaulted version.
    let config: serde_json::Value = step.step_config.clone().unwrap();
    assert_eq!(config["version"].as_u64(), Some(1));
}
#[test]
fn workflow_step_factory_is_registered() {
    // Compiling a workflow step should register a step factory keyed by the
    // step type and step name.
    let yaml = r#"
workflow:
  id: parent-wf
  version: 1
  steps:
    - name: run-child
      type: workflow
      config:
        workflow: child-wf
"#;
    let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();
    // Factory keys embed both the step kind and the step name.
    let has_workflow_factory = compiled
        .step_factories
        .iter()
        .any(|(key, _)| key.contains("workflow") && key.contains("run-child"));
    assert!(
        has_workflow_factory,
        "Should have factory for workflow step"
    );
}
#[test]
fn compile_multi_workflow_file() {
    // A `workflows:` (plural) document compiles to one definition per entry,
    // preserving declaration order.
    let yaml = r#"
workflows:
  - id: build
    version: 1
    steps:
      - name: compile
        type: shell
        config:
          run: cargo build
  - id: test
    version: 1
    steps:
      - name: run-tests
        type: shell
        config:
          run: cargo test
"#;
    let workflows = load_workflow_from_str(yaml, &HashMap::new()).unwrap();
    assert_eq!(workflows.len(), 2);
    assert_eq!(workflows[0].definition.id, "build");
    assert_eq!(workflows[1].definition.id, "test");
}
#[test]
fn compile_multi_workflow_with_cross_references() {
    // `pipeline` references `build`, which is defined later in the same
    // file; this must compile without error.
    let yaml = r#"
workflows:
  - id: pipeline
    version: 1
    steps:
      - name: run-build
        type: workflow
        config:
          workflow: build
  - id: build
    version: 1
    steps:
      - name: compile
        type: shell
        config:
          run: cargo build
"#;
    let workflows = load_workflow_from_str(yaml, &HashMap::new()).unwrap();
    assert_eq!(workflows.len(), 2);
    // The pipeline workflow should have a workflow step.
    let pipeline = &workflows[0];
    let step = pipeline
        .definition
        .steps
        .iter()
        .find(|s| s.name.as_deref() == Some("run-build"))
        .unwrap();
    assert!(step.step_type.contains("workflow"));
}
#[test]
fn workflow_step_with_mixed_steps() {
    // Shell steps before and after a workflow step in the same definition:
    // all three must compile and be wired sequentially.
    let yaml = r#"
workflow:
  id: mixed-wf
  version: 1
  steps:
    - name: setup
      type: shell
      config:
        run: echo setup
    - name: run-child
      type: workflow
      config:
        workflow: child-wf
    - name: cleanup
      type: shell
      config:
        run: echo cleanup
"#;
    let compiled = load_single_workflow_from_str(yaml, &HashMap::new()).unwrap();

    // Lookup helper over the compiled step list; panics if a step is absent.
    let find = |name: &str| {
        compiled
            .definition
            .steps
            .iter()
            .find(|s| s.name.as_deref() == Some(name))
            .unwrap()
    };

    // All three steps should be present in the compiled definition.
    for expected in ["setup", "run-child", "cleanup"] {
        assert!(
            compiled
                .definition
                .steps
                .iter()
                .any(|s| s.name.as_deref() == Some(expected)),
            "missing step: {expected}"
        );
    }

    // setup -> run-child -> cleanup wiring.
    let setup = find("setup");
    let run_child = find("run-child");
    let cleanup = find("cleanup");
    assert_eq!(setup.outcomes[0].next_step, run_child.id);
    assert_eq!(run_child.outcomes[0].next_step, cleanup.id);
}

File diff suppressed because it is too large Load Diff