feat(wfe): integrate workflow engine for up, seed, verify, bootstrap
Dispatch `sunbeam up`, `sunbeam seed`, `sunbeam verify`, and `sunbeam bootstrap` through WFE workflows instead of monolithic functions. Steps communicate via JSON workflow data and each workflow is persisted in a per-context SQLite database.
This commit is contained in:
616
Cargo.lock
generated
616
Cargo.lock
generated
@@ -158,6 +158,12 @@ dependencies = [
|
|||||||
"password-hash",
|
"password-hash",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "arraydeque"
|
||||||
|
version = "0.5.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "7d902e3d592a523def97af8f317b08ce16b7ab854c1985a0c671e6f15cebc236"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "asn1-rs"
|
name = "asn1-rs"
|
||||||
version = "0.7.1"
|
version = "0.7.1"
|
||||||
@@ -252,6 +258,15 @@ dependencies = [
|
|||||||
"syn",
|
"syn",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "atoi"
|
||||||
|
version = "2.0.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "f28d99ec8bfea296261ca1af174f24225171fea9664ba9003cbebee704810528"
|
||||||
|
dependencies = [
|
||||||
|
"num-traits",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "atomic-waker"
|
name = "atomic-waker"
|
||||||
version = "1.1.2"
|
version = "1.1.2"
|
||||||
@@ -606,6 +621,21 @@ dependencies = [
|
|||||||
"libc",
|
"libc",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "crc"
|
||||||
|
version = "3.4.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "5eb8a2a1cd12ab0d987a5d5e825195d372001a4094a0376319d5a0ad71c1ba0d"
|
||||||
|
dependencies = [
|
||||||
|
"crc-catalog",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "crc-catalog"
|
||||||
|
version = "2.4.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "crc32fast"
|
name = "crc32fast"
|
||||||
version = "1.5.0"
|
version = "1.5.0"
|
||||||
@@ -615,6 +645,15 @@ dependencies = [
|
|||||||
"cfg-if",
|
"cfg-if",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "crossbeam-queue"
|
||||||
|
version = "0.3.12"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "0f58bbc28f91df819d0aa2a2c00cd19754769c2fad90579b3592b1c9ba7a3115"
|
||||||
|
dependencies = [
|
||||||
|
"crossbeam-utils",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "crossbeam-utils"
|
name = "crossbeam-utils"
|
||||||
version = "0.8.21"
|
version = "0.8.21"
|
||||||
@@ -843,6 +882,12 @@ dependencies = [
|
|||||||
"syn",
|
"syn",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "dotenvy"
|
||||||
|
version = "0.15.7"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "dunce"
|
name = "dunce"
|
||||||
version = "1.0.5"
|
version = "1.0.5"
|
||||||
@@ -911,6 +956,9 @@ name = "either"
|
|||||||
version = "1.15.0"
|
version = "1.15.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719"
|
checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719"
|
||||||
|
dependencies = [
|
||||||
|
"serde",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "elliptic-curve"
|
name = "elliptic-curve"
|
||||||
@@ -955,6 +1003,15 @@ version = "1.0.0"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0"
|
checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "encoding_rs"
|
||||||
|
version = "0.8.35"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3"
|
||||||
|
dependencies = [
|
||||||
|
"cfg-if",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "enum-ordinalize"
|
name = "enum-ordinalize"
|
||||||
version = "4.3.2"
|
version = "4.3.2"
|
||||||
@@ -991,6 +1048,17 @@ dependencies = [
|
|||||||
"windows-sys 0.61.2",
|
"windows-sys 0.61.2",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "etcetera"
|
||||||
|
version = "0.8.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "136d1b5283a1ab77bd9257427ffd09d8667ced0570b6f938942bc7568ed5b943"
|
||||||
|
dependencies = [
|
||||||
|
"cfg-if",
|
||||||
|
"home",
|
||||||
|
"windows-sys 0.48.0",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "event-listener"
|
name = "event-listener"
|
||||||
version = "5.4.1"
|
version = "5.4.1"
|
||||||
@@ -1061,6 +1129,17 @@ dependencies = [
|
|||||||
"miniz_oxide",
|
"miniz_oxide",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "flume"
|
||||||
|
version = "0.11.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "da0e4dd2a88388a1f4ccc7c9ce104604dab68d9f408dc34cd45823d5a9069095"
|
||||||
|
dependencies = [
|
||||||
|
"futures-core",
|
||||||
|
"futures-sink",
|
||||||
|
"spin",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "flurry"
|
name = "flurry"
|
||||||
version = "0.5.2"
|
version = "0.5.2"
|
||||||
@@ -1142,6 +1221,17 @@ dependencies = [
|
|||||||
"futures-util",
|
"futures-util",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "futures-intrusive"
|
||||||
|
version = "0.5.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "1d930c203dd0b6ff06e0201a4a2fe9149b43c684fd4420555b26d21b1a02956f"
|
||||||
|
dependencies = [
|
||||||
|
"futures-core",
|
||||||
|
"lock_api",
|
||||||
|
"parking_lot",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "futures-io"
|
name = "futures-io"
|
||||||
version = "0.3.32"
|
version = "0.3.32"
|
||||||
@@ -1318,6 +1408,15 @@ version = "0.16.1"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100"
|
checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "hashlink"
|
||||||
|
version = "0.10.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "7382cf6263419f2d8df38c55d7da83da5c18aef87fc7a7fc1fb1e344edfe14c1"
|
||||||
|
dependencies = [
|
||||||
|
"hashbrown 0.15.5",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "headers"
|
name = "headers"
|
||||||
version = "0.4.1"
|
version = "0.4.1"
|
||||||
@@ -1354,6 +1453,12 @@ version = "0.5.2"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "fc0fef456e4baa96da950455cd02c081ca953b141298e41db3fc7e36b1da849c"
|
checksum = "fc0fef456e4baa96da950455cd02c081ca953b141298e41db3fc7e36b1da849c"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "hex"
|
||||||
|
version = "0.4.3"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "hex-literal"
|
name = "hex-literal"
|
||||||
version = "0.4.1"
|
version = "0.4.1"
|
||||||
@@ -1502,7 +1607,7 @@ dependencies = [
|
|||||||
"tokio",
|
"tokio",
|
||||||
"tokio-rustls",
|
"tokio-rustls",
|
||||||
"tower-service",
|
"tower-service",
|
||||||
"webpki-roots",
|
"webpki-roots 1.0.6",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@@ -1553,7 +1658,7 @@ dependencies = [
|
|||||||
"js-sys",
|
"js-sys",
|
||||||
"log",
|
"log",
|
||||||
"wasm-bindgen",
|
"wasm-bindgen",
|
||||||
"windows-core 0.62.2",
|
"windows-core",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@@ -1963,7 +2068,7 @@ dependencies = [
|
|||||||
"tokio",
|
"tokio",
|
||||||
"tokio-rustls",
|
"tokio-rustls",
|
||||||
"url",
|
"url",
|
||||||
"webpki-roots",
|
"webpki-roots 1.0.6",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@@ -1990,6 +2095,17 @@ dependencies = [
|
|||||||
"redox_syscall 0.7.3",
|
"redox_syscall 0.7.3",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "libsqlite3-sys"
|
||||||
|
version = "0.30.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "2e99fb7a497b1e3339bc746195567ed8d3e24945ecd636e3619d20b9de9e9149"
|
||||||
|
dependencies = [
|
||||||
|
"cc",
|
||||||
|
"pkg-config",
|
||||||
|
"vcpkg",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "linux-raw-sys"
|
name = "linux-raw-sys"
|
||||||
version = "0.12.1"
|
version = "0.12.1"
|
||||||
@@ -2032,6 +2148,16 @@ dependencies = [
|
|||||||
"regex-automata",
|
"regex-automata",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "md-5"
|
||||||
|
version = "0.10.6"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf"
|
||||||
|
dependencies = [
|
||||||
|
"cfg-if",
|
||||||
|
"digest",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "md5"
|
name = "md5"
|
||||||
version = "0.7.0"
|
version = "0.7.0"
|
||||||
@@ -2488,6 +2614,12 @@ dependencies = [
|
|||||||
"spki",
|
"spki",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "pkg-config"
|
||||||
|
version = "0.3.32"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "plain"
|
name = "plain"
|
||||||
version = "0.2.3"
|
version = "0.2.3"
|
||||||
@@ -2835,7 +2967,7 @@ dependencies = [
|
|||||||
"wasm-bindgen",
|
"wasm-bindgen",
|
||||||
"wasm-bindgen-futures",
|
"wasm-bindgen-futures",
|
||||||
"web-sys",
|
"web-sys",
|
||||||
"webpki-roots",
|
"webpki-roots 1.0.6",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@@ -3435,6 +3567,9 @@ name = "smallvec"
|
|||||||
version = "1.15.1"
|
version = "1.15.1"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03"
|
checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03"
|
||||||
|
dependencies = [
|
||||||
|
"serde",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "socket2"
|
name = "socket2"
|
||||||
@@ -3451,6 +3586,9 @@ name = "spin"
|
|||||||
version = "0.9.8"
|
version = "0.9.8"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67"
|
checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67"
|
||||||
|
dependencies = [
|
||||||
|
"lock_api",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "spki"
|
name = "spki"
|
||||||
@@ -3462,6 +3600,204 @@ dependencies = [
|
|||||||
"der",
|
"der",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "sqlx"
|
||||||
|
version = "0.8.6"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "1fefb893899429669dcdd979aff487bd78f4064e5e7907e4269081e0ef7d97dc"
|
||||||
|
dependencies = [
|
||||||
|
"sqlx-core",
|
||||||
|
"sqlx-macros",
|
||||||
|
"sqlx-mysql",
|
||||||
|
"sqlx-postgres",
|
||||||
|
"sqlx-sqlite",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "sqlx-core"
|
||||||
|
version = "0.8.6"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "ee6798b1838b6a0f69c007c133b8df5866302197e404e8b6ee8ed3e3a5e68dc6"
|
||||||
|
dependencies = [
|
||||||
|
"base64",
|
||||||
|
"bytes",
|
||||||
|
"chrono",
|
||||||
|
"crc",
|
||||||
|
"crossbeam-queue",
|
||||||
|
"either",
|
||||||
|
"event-listener",
|
||||||
|
"futures-core",
|
||||||
|
"futures-intrusive",
|
||||||
|
"futures-io",
|
||||||
|
"futures-util",
|
||||||
|
"hashbrown 0.15.5",
|
||||||
|
"hashlink",
|
||||||
|
"indexmap",
|
||||||
|
"log",
|
||||||
|
"memchr",
|
||||||
|
"once_cell",
|
||||||
|
"percent-encoding",
|
||||||
|
"rustls",
|
||||||
|
"serde",
|
||||||
|
"serde_json",
|
||||||
|
"sha2",
|
||||||
|
"smallvec",
|
||||||
|
"thiserror 2.0.18",
|
||||||
|
"tokio",
|
||||||
|
"tokio-stream",
|
||||||
|
"tracing",
|
||||||
|
"url",
|
||||||
|
"uuid",
|
||||||
|
"webpki-roots 0.26.11",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "sqlx-macros"
|
||||||
|
version = "0.8.6"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "a2d452988ccaacfbf5e0bdbc348fb91d7c8af5bee192173ac3636b5fb6e6715d"
|
||||||
|
dependencies = [
|
||||||
|
"proc-macro2",
|
||||||
|
"quote",
|
||||||
|
"sqlx-core",
|
||||||
|
"sqlx-macros-core",
|
||||||
|
"syn",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "sqlx-macros-core"
|
||||||
|
version = "0.8.6"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "19a9c1841124ac5a61741f96e1d9e2ec77424bf323962dd894bdb93f37d5219b"
|
||||||
|
dependencies = [
|
||||||
|
"dotenvy",
|
||||||
|
"either",
|
||||||
|
"heck",
|
||||||
|
"hex",
|
||||||
|
"once_cell",
|
||||||
|
"proc-macro2",
|
||||||
|
"quote",
|
||||||
|
"serde",
|
||||||
|
"serde_json",
|
||||||
|
"sha2",
|
||||||
|
"sqlx-core",
|
||||||
|
"sqlx-mysql",
|
||||||
|
"sqlx-postgres",
|
||||||
|
"sqlx-sqlite",
|
||||||
|
"syn",
|
||||||
|
"tokio",
|
||||||
|
"url",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "sqlx-mysql"
|
||||||
|
version = "0.8.6"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "aa003f0038df784eb8fecbbac13affe3da23b45194bd57dba231c8f48199c526"
|
||||||
|
dependencies = [
|
||||||
|
"atoi",
|
||||||
|
"base64",
|
||||||
|
"bitflags",
|
||||||
|
"byteorder",
|
||||||
|
"bytes",
|
||||||
|
"chrono",
|
||||||
|
"crc",
|
||||||
|
"digest",
|
||||||
|
"dotenvy",
|
||||||
|
"either",
|
||||||
|
"futures-channel",
|
||||||
|
"futures-core",
|
||||||
|
"futures-io",
|
||||||
|
"futures-util",
|
||||||
|
"generic-array",
|
||||||
|
"hex",
|
||||||
|
"hkdf",
|
||||||
|
"hmac",
|
||||||
|
"itoa",
|
||||||
|
"log",
|
||||||
|
"md-5",
|
||||||
|
"memchr",
|
||||||
|
"once_cell",
|
||||||
|
"percent-encoding",
|
||||||
|
"rand 0.8.5",
|
||||||
|
"rsa",
|
||||||
|
"serde",
|
||||||
|
"sha1",
|
||||||
|
"sha2",
|
||||||
|
"smallvec",
|
||||||
|
"sqlx-core",
|
||||||
|
"stringprep",
|
||||||
|
"thiserror 2.0.18",
|
||||||
|
"tracing",
|
||||||
|
"uuid",
|
||||||
|
"whoami",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "sqlx-postgres"
|
||||||
|
version = "0.8.6"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "db58fcd5a53cf07c184b154801ff91347e4c30d17a3562a635ff028ad5deda46"
|
||||||
|
dependencies = [
|
||||||
|
"atoi",
|
||||||
|
"base64",
|
||||||
|
"bitflags",
|
||||||
|
"byteorder",
|
||||||
|
"chrono",
|
||||||
|
"crc",
|
||||||
|
"dotenvy",
|
||||||
|
"etcetera",
|
||||||
|
"futures-channel",
|
||||||
|
"futures-core",
|
||||||
|
"futures-util",
|
||||||
|
"hex",
|
||||||
|
"hkdf",
|
||||||
|
"hmac",
|
||||||
|
"home",
|
||||||
|
"itoa",
|
||||||
|
"log",
|
||||||
|
"md-5",
|
||||||
|
"memchr",
|
||||||
|
"once_cell",
|
||||||
|
"rand 0.8.5",
|
||||||
|
"serde",
|
||||||
|
"serde_json",
|
||||||
|
"sha2",
|
||||||
|
"smallvec",
|
||||||
|
"sqlx-core",
|
||||||
|
"stringprep",
|
||||||
|
"thiserror 2.0.18",
|
||||||
|
"tracing",
|
||||||
|
"uuid",
|
||||||
|
"whoami",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "sqlx-sqlite"
|
||||||
|
version = "0.8.6"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "c2d12fe70b2c1b4401038055f90f151b78208de1f9f89a7dbfd41587a10c3eea"
|
||||||
|
dependencies = [
|
||||||
|
"atoi",
|
||||||
|
"chrono",
|
||||||
|
"flume",
|
||||||
|
"futures-channel",
|
||||||
|
"futures-core",
|
||||||
|
"futures-executor",
|
||||||
|
"futures-intrusive",
|
||||||
|
"futures-util",
|
||||||
|
"libsqlite3-sys",
|
||||||
|
"log",
|
||||||
|
"percent-encoding",
|
||||||
|
"serde",
|
||||||
|
"serde_urlencoded",
|
||||||
|
"sqlx-core",
|
||||||
|
"thiserror 2.0.18",
|
||||||
|
"tracing",
|
||||||
|
"url",
|
||||||
|
"uuid",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "ssh-cipher"
|
name = "ssh-cipher"
|
||||||
version = "0.2.0"
|
version = "0.2.0"
|
||||||
@@ -3532,6 +3868,17 @@ dependencies = [
|
|||||||
"windows-sys 0.59.0",
|
"windows-sys 0.59.0",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "stringprep"
|
||||||
|
version = "0.1.5"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "7b4df3d392d81bd458a8a621b8bffbd2302a12ffe288a9d931670948749463b1"
|
||||||
|
dependencies = [
|
||||||
|
"unicode-bidi",
|
||||||
|
"unicode-normalization",
|
||||||
|
"unicode-properties",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "strsim"
|
name = "strsim"
|
||||||
version = "0.11.1"
|
version = "0.11.1"
|
||||||
@@ -3548,13 +3895,46 @@ checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"
|
|||||||
name = "sunbeam"
|
name = "sunbeam"
|
||||||
version = "1.1.2"
|
version = "1.1.2"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
|
"aes-gcm",
|
||||||
|
"argon2",
|
||||||
|
"async-trait",
|
||||||
|
"base64",
|
||||||
|
"bytes",
|
||||||
"chrono",
|
"chrono",
|
||||||
"clap",
|
"clap",
|
||||||
|
"dirs",
|
||||||
|
"flate2",
|
||||||
|
"futures",
|
||||||
|
"hmac",
|
||||||
|
"indicatif",
|
||||||
|
"k8s-openapi",
|
||||||
|
"kube",
|
||||||
|
"lettre",
|
||||||
|
"pkcs1",
|
||||||
|
"pkcs8",
|
||||||
|
"rand 0.8.5",
|
||||||
|
"rcgen",
|
||||||
|
"reqwest",
|
||||||
|
"rsa",
|
||||||
|
"russh",
|
||||||
|
"russh-keys",
|
||||||
"rustls",
|
"rustls",
|
||||||
"sunbeam-sdk",
|
"serde",
|
||||||
|
"serde_json",
|
||||||
|
"serde_yaml",
|
||||||
|
"sha2",
|
||||||
|
"tar",
|
||||||
|
"tempfile",
|
||||||
|
"thiserror 2.0.18",
|
||||||
"tokio",
|
"tokio",
|
||||||
|
"tokio-stream",
|
||||||
"tracing",
|
"tracing",
|
||||||
"tracing-subscriber",
|
"tracing-subscriber",
|
||||||
|
"wfe",
|
||||||
|
"wfe-core",
|
||||||
|
"wfe-sqlite",
|
||||||
|
"wfe-yaml",
|
||||||
|
"wiremock",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@@ -3988,12 +4368,33 @@ version = "0.1.7"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "2896d95c02a80c6d6a5d6e953d479f5ddf2dfdb6a244441010e373ac0fb88971"
|
checksum = "2896d95c02a80c6d6a5d6e953d479f5ddf2dfdb6a244441010e373ac0fb88971"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "unicode-bidi"
|
||||||
|
version = "0.3.18"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "5c1cb5db39152898a79168971543b1cb5020dff7fe43c8dc468b0885f5e29df5"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "unicode-ident"
|
name = "unicode-ident"
|
||||||
version = "1.0.24"
|
version = "1.0.24"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "e6e4313cd5fcd3dad5cafa179702e2b244f760991f45397d14d4ebf38247da75"
|
checksum = "e6e4313cd5fcd3dad5cafa179702e2b244f760991f45397d14d4ebf38247da75"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "unicode-normalization"
|
||||||
|
version = "0.1.25"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "5fd4f6878c9cb28d874b009da9e8d183b5abc80117c40bbd187a1fde336be6e8"
|
||||||
|
dependencies = [
|
||||||
|
"tinyvec",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "unicode-properties"
|
||||||
|
version = "0.1.4"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "7df058c713841ad818f1dc5d3fd88063241cc61f49f5fbea4b951e8cf5a8d71d"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "unicode-width"
|
name = "unicode-width"
|
||||||
version = "0.2.2"
|
version = "0.2.2"
|
||||||
@@ -4058,12 +4459,30 @@ version = "0.2.2"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
|
checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "uuid"
|
||||||
|
version = "1.23.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "5ac8b6f42ead25368cf5b098aeb3dc8a1a2c05a3eee8a9a1a68c640edbfc79d9"
|
||||||
|
dependencies = [
|
||||||
|
"getrandom 0.4.2",
|
||||||
|
"js-sys",
|
||||||
|
"serde_core",
|
||||||
|
"wasm-bindgen",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "valuable"
|
name = "valuable"
|
||||||
version = "0.1.1"
|
version = "0.1.1"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65"
|
checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "vcpkg"
|
||||||
|
version = "0.2.15"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "version_check"
|
name = "version_check"
|
||||||
version = "0.9.5"
|
version = "0.9.5"
|
||||||
@@ -4103,6 +4522,12 @@ dependencies = [
|
|||||||
"wit-bindgen",
|
"wit-bindgen",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "wasite"
|
||||||
|
version = "0.1.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "wasm-bindgen"
|
name = "wasm-bindgen"
|
||||||
version = "0.2.114"
|
version = "0.2.114"
|
||||||
@@ -4216,6 +4641,15 @@ dependencies = [
|
|||||||
"wasm-bindgen",
|
"wasm-bindgen",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "webpki-roots"
|
||||||
|
version = "0.26.11"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "521bc38abb08001b01866da9f51eb7c5d647a19260e00054a8c7fd5f9e57f7a9"
|
||||||
|
dependencies = [
|
||||||
|
"webpki-roots 1.0.6",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "webpki-roots"
|
name = "webpki-roots"
|
||||||
version = "1.0.6"
|
version = "1.0.6"
|
||||||
@@ -4225,6 +4659,89 @@ dependencies = [
|
|||||||
"rustls-pki-types",
|
"rustls-pki-types",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "wfe"
|
||||||
|
version = "1.6.2"
|
||||||
|
source = "sparse+https://src.sunbeam.pt/api/packages/studio/cargo/"
|
||||||
|
checksum = "9634469c5e6c76f480d505cbe48df5e993b4d4bc69174d70099740be19b5858b"
|
||||||
|
dependencies = [
|
||||||
|
"async-trait",
|
||||||
|
"chrono",
|
||||||
|
"serde",
|
||||||
|
"serde_json",
|
||||||
|
"thiserror 2.0.18",
|
||||||
|
"tokio",
|
||||||
|
"tokio-util",
|
||||||
|
"tracing",
|
||||||
|
"tracing-subscriber",
|
||||||
|
"uuid",
|
||||||
|
"wfe-core",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "wfe-core"
|
||||||
|
version = "1.6.2"
|
||||||
|
source = "sparse+https://src.sunbeam.pt/api/packages/studio/cargo/"
|
||||||
|
checksum = "cd141154a5082c6f13f025599bbaa1fa0e0b04b807dc448d36ce2d324054b532"
|
||||||
|
dependencies = [
|
||||||
|
"async-trait",
|
||||||
|
"chrono",
|
||||||
|
"serde",
|
||||||
|
"serde_json",
|
||||||
|
"thiserror 2.0.18",
|
||||||
|
"tokio",
|
||||||
|
"tokio-util",
|
||||||
|
"tracing",
|
||||||
|
"uuid",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "wfe-sqlite"
|
||||||
|
version = "1.6.2"
|
||||||
|
source = "sparse+https://src.sunbeam.pt/api/packages/studio/cargo/"
|
||||||
|
checksum = "b5d6bf0ef47e3290bcf9b69844f8ad4e9844c47e676a73990e8c07ab9843e554"
|
||||||
|
dependencies = [
|
||||||
|
"async-trait",
|
||||||
|
"chrono",
|
||||||
|
"serde",
|
||||||
|
"serde_json",
|
||||||
|
"sqlx",
|
||||||
|
"thiserror 2.0.18",
|
||||||
|
"tokio",
|
||||||
|
"tracing",
|
||||||
|
"uuid",
|
||||||
|
"wfe-core",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "wfe-yaml"
|
||||||
|
version = "1.6.2"
|
||||||
|
source = "sparse+https://src.sunbeam.pt/api/packages/studio/cargo/"
|
||||||
|
checksum = "ee81e97065a8a53964796d6f0c5d035a84f6632bb8847343c145a5323ff27608"
|
||||||
|
dependencies = [
|
||||||
|
"async-trait",
|
||||||
|
"chrono",
|
||||||
|
"regex",
|
||||||
|
"serde",
|
||||||
|
"serde_json",
|
||||||
|
"serde_yaml",
|
||||||
|
"thiserror 2.0.18",
|
||||||
|
"tokio",
|
||||||
|
"tracing",
|
||||||
|
"wfe-core",
|
||||||
|
"yaml-merge-keys",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "whoami"
|
||||||
|
version = "1.6.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "5d4a4db5077702ca3015d3d02d74974948aba2ad9e12ab7df718ee64ccd7e97d"
|
||||||
|
dependencies = [
|
||||||
|
"libredox",
|
||||||
|
"wasite",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "winapi"
|
name = "winapi"
|
||||||
version = "0.3.9"
|
version = "0.3.9"
|
||||||
@@ -4253,7 +4770,7 @@ version = "0.58.0"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "dd04d41d93c4992d421894c18c8b43496aa748dd4c081bac0dc93eb0489272b6"
|
checksum = "dd04d41d93c4992d421894c18c8b43496aa748dd4c081bac0dc93eb0489272b6"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"windows-core 0.58.0",
|
"windows-core",
|
||||||
"windows-targets 0.52.6",
|
"windows-targets 0.52.6",
|
||||||
]
|
]
|
||||||
|
|
||||||
@@ -4263,26 +4780,13 @@ version = "0.58.0"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "6ba6d44ec8c2591c134257ce647b7ea6b20335bf6379a27dac5f1641fcf59f99"
|
checksum = "6ba6d44ec8c2591c134257ce647b7ea6b20335bf6379a27dac5f1641fcf59f99"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"windows-implement 0.58.0",
|
"windows-implement",
|
||||||
"windows-interface 0.58.0",
|
"windows-interface",
|
||||||
"windows-result 0.2.0",
|
"windows-result",
|
||||||
"windows-strings 0.1.0",
|
"windows-strings",
|
||||||
"windows-targets 0.52.6",
|
"windows-targets 0.52.6",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "windows-core"
|
|
||||||
version = "0.62.2"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "b8e83a14d34d0623b51dce9581199302a221863196a1dde71a7663a4c2be9deb"
|
|
||||||
dependencies = [
|
|
||||||
"windows-implement 0.60.2",
|
|
||||||
"windows-interface 0.59.3",
|
|
||||||
"windows-link",
|
|
||||||
"windows-result 0.4.1",
|
|
||||||
"windows-strings 0.5.1",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "windows-implement"
|
name = "windows-implement"
|
||||||
version = "0.58.0"
|
version = "0.58.0"
|
||||||
@@ -4294,17 +4798,6 @@ dependencies = [
|
|||||||
"syn",
|
"syn",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "windows-implement"
|
|
||||||
version = "0.60.2"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf"
|
|
||||||
dependencies = [
|
|
||||||
"proc-macro2",
|
|
||||||
"quote",
|
|
||||||
"syn",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "windows-interface"
|
name = "windows-interface"
|
||||||
version = "0.58.0"
|
version = "0.58.0"
|
||||||
@@ -4316,17 +4809,6 @@ dependencies = [
|
|||||||
"syn",
|
"syn",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "windows-interface"
|
|
||||||
version = "0.59.3"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358"
|
|
||||||
dependencies = [
|
|
||||||
"proc-macro2",
|
|
||||||
"quote",
|
|
||||||
"syn",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "windows-link"
|
name = "windows-link"
|
||||||
version = "0.2.1"
|
version = "0.2.1"
|
||||||
@@ -4342,34 +4824,16 @@ dependencies = [
|
|||||||
"windows-targets 0.52.6",
|
"windows-targets 0.52.6",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "windows-result"
|
|
||||||
version = "0.4.1"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5"
|
|
||||||
dependencies = [
|
|
||||||
"windows-link",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "windows-strings"
|
name = "windows-strings"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "4cd9b125c486025df0eabcb585e62173c6c9eddcec5d117d3b6e8c30e2ee4d10"
|
checksum = "4cd9b125c486025df0eabcb585e62173c6c9eddcec5d117d3b6e8c30e2ee4d10"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"windows-result 0.2.0",
|
"windows-result",
|
||||||
"windows-targets 0.52.6",
|
"windows-targets 0.52.6",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "windows-strings"
|
|
||||||
version = "0.5.1"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091"
|
|
||||||
dependencies = [
|
|
||||||
"windows-link",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "windows-sys"
|
name = "windows-sys"
|
||||||
version = "0.48.0"
|
version = "0.48.0"
|
||||||
@@ -4746,6 +5210,30 @@ dependencies = [
|
|||||||
"rustix",
|
"rustix",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "yaml-merge-keys"
|
||||||
|
version = "0.8.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "9a4ab063eb8d1cdf00750f0a5c21a958189ad17ee56a4d415b96181fba61ea31"
|
||||||
|
dependencies = [
|
||||||
|
"hashbrown 0.15.5",
|
||||||
|
"lazy_static",
|
||||||
|
"serde_yaml",
|
||||||
|
"thiserror 2.0.18",
|
||||||
|
"yaml-rust2",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "yaml-rust2"
|
||||||
|
version = "0.10.4"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "2462ea039c445496d8793d052e13787f2b90e750b833afee748e601c17621ed9"
|
||||||
|
dependencies = [
|
||||||
|
"arraydeque",
|
||||||
|
"encoding_rs",
|
||||||
|
"hashlink",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "yasna"
|
name = "yasna"
|
||||||
version = "0.5.2"
|
version = "0.5.2"
|
||||||
|
|||||||
87
Cargo.toml
87
Cargo.toml
@@ -1,4 +1,87 @@
|
|||||||
|
[package]
|
||||||
|
name = "sunbeam"
|
||||||
|
version = "1.1.2"
|
||||||
|
edition = "2024"
|
||||||
|
description = "Sunbeam Studios CLI"
|
||||||
|
repository = "https://src.sunbeam.pt/studio/cli"
|
||||||
|
license = "MIT"
|
||||||
|
|
||||||
|
[[bin]]
|
||||||
|
name = "sunbeam"
|
||||||
|
path = "src/main.rs"
|
||||||
|
|
||||||
[workspace]
|
[workspace]
|
||||||
members = ["sunbeam-sdk", "sunbeam"]
|
members = ["sunbeam-sdk"]
|
||||||
exclude = ["vendor"]
|
|
||||||
resolver = "3"
|
resolver = "3"
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
# Core
|
||||||
|
thiserror = "2"
|
||||||
|
tokio = { version = "1", features = ["full"] }
|
||||||
|
clap = { version = "4", features = ["derive"] }
|
||||||
|
serde = { version = "1", features = ["derive"] }
|
||||||
|
serde_json = "1"
|
||||||
|
serde_yaml = "0.9"
|
||||||
|
tracing = "0.1"
|
||||||
|
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
|
||||||
|
rustls = { version = "0.23", features = ["ring"] }
|
||||||
|
|
||||||
|
# Kubernetes
|
||||||
|
kube = { version = "0.99", features = ["client", "runtime", "derive", "ws"] }
|
||||||
|
k8s-openapi = { version = "0.24", features = ["v1_32"] }
|
||||||
|
|
||||||
|
# HTTP + TLS
|
||||||
|
reqwest = { version = "0.12", default-features = false, features = ["json", "rustls-tls"] }
|
||||||
|
bytes = "1"
|
||||||
|
|
||||||
|
# SSH
|
||||||
|
russh = "0.46"
|
||||||
|
russh-keys = "0.46"
|
||||||
|
|
||||||
|
# Crypto
|
||||||
|
rsa = "0.9"
|
||||||
|
pkcs8 = { version = "0.10", features = ["pem"] }
|
||||||
|
pkcs1 = { version = "0.7", features = ["pem"] }
|
||||||
|
sha2 = "0.10"
|
||||||
|
hmac = "0.12"
|
||||||
|
base64 = "0.22"
|
||||||
|
rand = "0.8"
|
||||||
|
aes-gcm = "0.10"
|
||||||
|
argon2 = "0.5"
|
||||||
|
indicatif = { version = "0.17", features = ["tokio"] }
|
||||||
|
|
||||||
|
# Certificate generation
|
||||||
|
rcgen = "0.14"
|
||||||
|
|
||||||
|
# SMTP
|
||||||
|
lettre = { version = "0.11", default-features = false, features = ["smtp-transport", "tokio1-rustls-tls", "builder", "hostname"] }
|
||||||
|
|
||||||
|
# Archive handling
|
||||||
|
flate2 = "1"
|
||||||
|
tar = "0.4"
|
||||||
|
|
||||||
|
# Async
|
||||||
|
futures = "0.3"
|
||||||
|
tokio-stream = "0.1"
|
||||||
|
|
||||||
|
# Utility
|
||||||
|
tempfile = "3"
|
||||||
|
dirs = "5"
|
||||||
|
chrono = { version = "0.4", features = ["serde"] }
|
||||||
|
|
||||||
|
# Workflow engine
|
||||||
|
wfe = { version = "1.6.2", registry = "sunbeam" }
|
||||||
|
wfe-core = { version = "1.6.2", registry = "sunbeam", features = ["test-support"] }
|
||||||
|
wfe-sqlite = { version = "1.6.2", registry = "sunbeam" }
|
||||||
|
wfe-yaml = { version = "1.6.2", registry = "sunbeam" }
|
||||||
|
async-trait = "0.1"
|
||||||
|
|
||||||
|
[dev-dependencies]
|
||||||
|
wiremock = "0.6"
|
||||||
|
|
||||||
|
[build-dependencies]
|
||||||
|
reqwest = { version = "0.12", default-features = false, features = ["blocking", "rustls-tls"] }
|
||||||
|
sha2 = "0.10"
|
||||||
|
flate2 = "1"
|
||||||
|
tar = "0.4"
|
||||||
|
chrono = "0.4"
|
||||||
|
|||||||
@@ -921,7 +921,7 @@ pub async fn dispatch() -> Result<()> {
|
|||||||
let instance = wfe::run_workflow_sync(
|
let instance = wfe::run_workflow_sync(
|
||||||
&host,
|
&host,
|
||||||
"up",
|
"up",
|
||||||
1,
|
2,
|
||||||
initial_data,
|
initial_data,
|
||||||
std::time::Duration::from_secs(3600),
|
std::time::Duration::from_secs(3600),
|
||||||
)
|
)
|
||||||
@@ -1005,7 +1005,7 @@ pub async fn dispatch() -> Result<()> {
|
|||||||
let instance = wfe::run_workflow_sync(
|
let instance = wfe::run_workflow_sync(
|
||||||
&host,
|
&host,
|
||||||
"seed",
|
"seed",
|
||||||
1,
|
2,
|
||||||
initial_data,
|
initial_data,
|
||||||
std::time::Duration::from_secs(900),
|
std::time::Duration::from_secs(900),
|
||||||
)
|
)
|
||||||
|
|||||||
100
src/cluster.rs
100
src/cluster.rs
@@ -1,4 +1,4 @@
|
|||||||
//! Cluster lifecycle — cert-manager, Linkerd, TLS, core service readiness.
|
//! Cluster lifecycle — cert-manager, TLS, core service readiness.
|
||||||
//!
|
//!
|
||||||
//! Pure K8s implementation: no Lima VM operations.
|
//! Pure K8s implementation: no Lima VM operations.
|
||||||
|
|
||||||
@@ -6,13 +6,10 @@ use crate::constants::GITEA_ADMIN_USER;
|
|||||||
use crate::error::{Result, ResultExt, SunbeamError};
|
use crate::error::{Result, ResultExt, SunbeamError};
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
|
|
||||||
const CERT_MANAGER_URL: &str =
|
pub(crate) const CERT_MANAGER_URL: &str =
|
||||||
"https://github.com/cert-manager/cert-manager/releases/download/v1.17.0/cert-manager.yaml";
|
"https://github.com/cert-manager/cert-manager/releases/download/v1.17.0/cert-manager.yaml";
|
||||||
|
|
||||||
const GATEWAY_API_CRDS_URL: &str =
|
pub(crate) fn secrets_dir() -> PathBuf {
|
||||||
"https://github.com/kubernetes-sigs/gateway-api/releases/download/v1.4.0/standard-install.yaml";
|
|
||||||
|
|
||||||
fn secrets_dir() -> PathBuf {
|
|
||||||
crate::config::get_infra_dir()
|
crate::config::get_infra_dir()
|
||||||
.join("secrets")
|
.join("secrets")
|
||||||
.join("local")
|
.join("local")
|
||||||
@@ -56,77 +53,11 @@ async fn ensure_cert_manager() -> Result<()> {
|
|||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
// ---------------------------------------------------------------------------
|
|
||||||
// Linkerd
|
|
||||||
// ---------------------------------------------------------------------------
|
|
||||||
|
|
||||||
async fn ensure_linkerd() -> Result<()> {
|
|
||||||
crate::output::step("Linkerd...");
|
|
||||||
|
|
||||||
if crate::kube::ns_exists("linkerd").await? {
|
|
||||||
crate::output::ok("Already installed.");
|
|
||||||
return Ok(());
|
|
||||||
}
|
|
||||||
|
|
||||||
// Gateway API CRDs
|
|
||||||
crate::output::ok("Installing Gateway API CRDs...");
|
|
||||||
let gateway_body = reqwest::get(GATEWAY_API_CRDS_URL)
|
|
||||||
.await
|
|
||||||
.ctx("Failed to download Gateway API CRDs")?
|
|
||||||
.text()
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
// Gateway API CRDs require server-side apply; kube_apply already does SSA
|
|
||||||
crate::kube::kube_apply(&gateway_body).await?;
|
|
||||||
|
|
||||||
// Linkerd CRDs via subprocess (no pure HTTP source for linkerd manifests)
|
|
||||||
crate::output::ok("Installing Linkerd CRDs...");
|
|
||||||
let crds_output = tokio::process::Command::new("linkerd")
|
|
||||||
.args(["install", "--crds"])
|
|
||||||
.output()
|
|
||||||
.await
|
|
||||||
.ctx("Failed to run `linkerd install --crds`")?;
|
|
||||||
|
|
||||||
if !crds_output.status.success() {
|
|
||||||
let stderr = String::from_utf8_lossy(&crds_output.stderr);
|
|
||||||
return Err(SunbeamError::tool("linkerd", format!("install --crds failed: {stderr}")));
|
|
||||||
}
|
|
||||||
let crds = String::from_utf8_lossy(&crds_output.stdout);
|
|
||||||
crate::kube::kube_apply(&crds).await?;
|
|
||||||
|
|
||||||
// Linkerd control plane
|
|
||||||
crate::output::ok("Installing Linkerd control plane...");
|
|
||||||
let cp_output = tokio::process::Command::new("linkerd")
|
|
||||||
.args(["install"])
|
|
||||||
.output()
|
|
||||||
.await
|
|
||||||
.ctx("Failed to run `linkerd install`")?;
|
|
||||||
|
|
||||||
if !cp_output.status.success() {
|
|
||||||
let stderr = String::from_utf8_lossy(&cp_output.stderr);
|
|
||||||
return Err(SunbeamError::tool("linkerd", format!("install failed: {stderr}")));
|
|
||||||
}
|
|
||||||
let cp = String::from_utf8_lossy(&cp_output.stdout);
|
|
||||||
crate::kube::kube_apply(&cp).await?;
|
|
||||||
|
|
||||||
for dep in &[
|
|
||||||
"linkerd-identity",
|
|
||||||
"linkerd-destination",
|
|
||||||
"linkerd-proxy-injector",
|
|
||||||
] {
|
|
||||||
crate::output::ok(&format!("Waiting for {dep}..."));
|
|
||||||
wait_rollout("linkerd", dep, 120).await?;
|
|
||||||
}
|
|
||||||
|
|
||||||
crate::output::ok("Installed.");
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
// TLS certificate (rcgen)
|
// TLS certificate (rcgen)
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
async fn ensure_tls_cert(domain: &str) -> Result<()> {
|
pub(crate) async fn ensure_tls_cert(domain: &str) -> Result<()> {
|
||||||
crate::output::step("TLS certificate...");
|
crate::output::step("TLS certificate...");
|
||||||
|
|
||||||
let dir = secrets_dir();
|
let dir = secrets_dir();
|
||||||
@@ -174,7 +105,7 @@ async fn ensure_tls_cert(domain: &str) -> Result<()> {
|
|||||||
// TLS secret
|
// TLS secret
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
async fn ensure_tls_secret(domain: &str) -> Result<()> {
|
pub(crate) async fn ensure_tls_secret(domain: &str) -> Result<()> {
|
||||||
crate::output::step("TLS secret...");
|
crate::output::step("TLS secret...");
|
||||||
|
|
||||||
let _ = domain; // domain used contextually above; secret uses files
|
let _ = domain; // domain used contextually above; secret uses files
|
||||||
@@ -242,7 +173,7 @@ async fn wait_for_core() -> Result<()> {
|
|||||||
// Print URLs
|
// Print URLs
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
fn print_urls(domain: &str, _gitea_admin_pass: &str) {
|
pub(crate) fn print_urls(domain: &str, _gitea_admin_pass: &str) {
|
||||||
let sep = "\u{2500}".repeat(60);
|
let sep = "\u{2500}".repeat(60);
|
||||||
println!("\n{sep}");
|
println!("\n{sep}");
|
||||||
println!(" Stack is up. Domain: {domain}");
|
println!(" Stack is up. Domain: {domain}");
|
||||||
@@ -284,7 +215,7 @@ fn print_urls(domain: &str, _gitea_admin_pass: &str) {
|
|||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
/// Poll deployment rollout status (approximate: check Available condition).
|
/// Poll deployment rollout status (approximate: check Available condition).
|
||||||
async fn wait_rollout(ns: &str, deployment: &str, timeout_secs: u64) -> Result<()> {
|
pub(crate) async fn wait_rollout(ns: &str, deployment: &str, timeout_secs: u64) -> Result<()> {
|
||||||
use k8s_openapi::api::apps::v1::Deployment;
|
use k8s_openapi::api::apps::v1::Deployment;
|
||||||
use std::time::{Duration, Instant};
|
use std::time::{Duration, Instant};
|
||||||
|
|
||||||
@@ -330,7 +261,6 @@ pub async fn cmd_up() -> Result<()> {
|
|||||||
let domain = crate::kube::get_domain().await?;
|
let domain = crate::kube::get_domain().await?;
|
||||||
|
|
||||||
ensure_cert_manager().await?;
|
ensure_cert_manager().await?;
|
||||||
ensure_linkerd().await?;
|
|
||||||
ensure_tls_cert(&domain).await?;
|
ensure_tls_cert(&domain).await?;
|
||||||
ensure_tls_secret(&domain).await?;
|
ensure_tls_secret(&domain).await?;
|
||||||
|
|
||||||
@@ -382,22 +312,6 @@ mod tests {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn gateway_api_crds_url_points_to_github_release() {
|
|
||||||
assert!(GATEWAY_API_CRDS_URL
|
|
||||||
.starts_with("https://github.com/kubernetes-sigs/gateway-api/"));
|
|
||||||
assert!(GATEWAY_API_CRDS_URL.contains("/releases/download/"));
|
|
||||||
assert!(GATEWAY_API_CRDS_URL.ends_with(".yaml"));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn gateway_api_crds_url_has_version() {
|
|
||||||
assert!(
|
|
||||||
GATEWAY_API_CRDS_URL.contains("/v1."),
|
|
||||||
"GATEWAY_API_CRDS_URL should reference a v1.x release"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn secrets_dir_ends_with_secrets_local() {
|
fn secrets_dir_ends_with_secrets_local() {
|
||||||
let dir = secrets_dir();
|
let dir = secrets_dir();
|
||||||
|
|||||||
@@ -80,20 +80,61 @@ pub fn domain() -> &'static str {
|
|||||||
.unwrap_or("")
|
.unwrap_or("")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Central path helpers — all sunbeam state lives under ~/.sunbeam/
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Base directory for all sunbeam state: ~/.sunbeam/
|
||||||
|
pub fn sunbeam_dir() -> PathBuf {
|
||||||
|
dirs::home_dir()
|
||||||
|
.unwrap_or_else(|| PathBuf::from("."))
|
||||||
|
.join(".sunbeam")
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Context-specific directory: ~/.sunbeam/{context}/
|
||||||
|
pub fn context_dir(context_name: &str) -> PathBuf {
|
||||||
|
let name = if context_name.is_empty() { "default" } else { context_name };
|
||||||
|
sunbeam_dir().join(name)
|
||||||
|
}
|
||||||
|
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
// Config file I/O
|
// Config file I/O
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
fn config_path() -> PathBuf {
|
fn config_path() -> PathBuf {
|
||||||
|
sunbeam_dir().join("config.json")
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Legacy config path (~/.sunbeam.json) — used only for migration.
|
||||||
|
fn legacy_config_path() -> PathBuf {
|
||||||
dirs::home_dir()
|
dirs::home_dir()
|
||||||
.unwrap_or_else(|| PathBuf::from("."))
|
.unwrap_or_else(|| PathBuf::from("."))
|
||||||
.join(".sunbeam.json")
|
.join(".sunbeam.json")
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Load configuration from ~/.sunbeam.json, return default if not found.
|
/// Load configuration, return default if not found.
|
||||||
|
/// Migrates legacy ~/.sunbeam.json → ~/.sunbeam/config.json on first load.
|
||||||
/// Migrates legacy flat config to context-based format.
|
/// Migrates legacy flat config to context-based format.
|
||||||
pub fn load_config() -> SunbeamConfig {
|
pub fn load_config() -> SunbeamConfig {
|
||||||
let path = config_path();
|
let path = config_path();
|
||||||
|
|
||||||
|
// Migration: move legacy ~/.sunbeam.json → ~/.sunbeam/config.json
|
||||||
|
if !path.exists() {
|
||||||
|
let legacy = legacy_config_path();
|
||||||
|
if legacy.exists() {
|
||||||
|
if let Some(parent) = path.parent() {
|
||||||
|
let _ = std::fs::create_dir_all(parent);
|
||||||
|
}
|
||||||
|
if std::fs::copy(&legacy, &path).is_ok() {
|
||||||
|
crate::output::ok(&format!(
|
||||||
|
"Migrated config: {} → {}",
|
||||||
|
legacy.display(),
|
||||||
|
path.display()
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
if !path.exists() {
|
if !path.exists() {
|
||||||
return SunbeamConfig::default();
|
return SunbeamConfig::default();
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -309,7 +309,7 @@ pub async fn create_secret(ns: &str, name: &str, data: HashMap<String, String>)
|
|||||||
pub async fn find_pod_by_label(ns: &str, label: &str) -> Option<String> {
|
pub async fn find_pod_by_label(ns: &str, label: &str) -> Option<String> {
|
||||||
let client = get_client().await.ok()?;
|
let client = get_client().await.ok()?;
|
||||||
let pods: kube::Api<k8s_openapi::api::core::v1::Pod> =
|
let pods: kube::Api<k8s_openapi::api::core::v1::Pod> =
|
||||||
kube::Api::namespaced(client, ns);
|
kube::Api::namespaced(client.clone(), ns);
|
||||||
let lp = kube::api::ListParams::default().labels(label);
|
let lp = kube::api::ListParams::default().labels(label);
|
||||||
let pod_list = pods.list(&lp).await.ok()?;
|
let pod_list = pods.list(&lp).await.ok()?;
|
||||||
pod_list
|
pod_list
|
||||||
|
|||||||
@@ -19,6 +19,7 @@ mod services;
|
|||||||
mod tools;
|
mod tools;
|
||||||
mod update;
|
mod update;
|
||||||
mod users;
|
mod users;
|
||||||
|
mod workflows;
|
||||||
|
|
||||||
#[tokio::main]
|
#[tokio::main]
|
||||||
async fn main() {
|
async fn main() {
|
||||||
|
|||||||
32
src/tools.rs
32
src/tools.rs
@@ -4,13 +4,39 @@ use std::path::PathBuf;
|
|||||||
static KUSTOMIZE_BIN: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/kustomize"));
|
static KUSTOMIZE_BIN: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/kustomize"));
|
||||||
static HELM_BIN: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/helm"));
|
static HELM_BIN: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/helm"));
|
||||||
|
|
||||||
fn cache_dir() -> PathBuf {
|
/// Legacy bin cache dir — used only for migration.
|
||||||
|
fn legacy_cache_dir() -> PathBuf {
|
||||||
dirs::data_dir()
|
dirs::data_dir()
|
||||||
.unwrap_or_else(|| dirs::home_dir().unwrap_or_else(|| PathBuf::from(".")))
|
.unwrap_or_else(|| dirs::home_dir().unwrap_or_else(|| PathBuf::from(".")))
|
||||||
.join("sunbeam")
|
.join("sunbeam")
|
||||||
.join("bin")
|
.join("bin")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn cache_dir() -> PathBuf {
|
||||||
|
let new_dir = crate::config::sunbeam_dir().join("bin");
|
||||||
|
|
||||||
|
// Migration: copy binaries from legacy location if new dir doesn't exist yet
|
||||||
|
if !new_dir.exists() {
|
||||||
|
let legacy = legacy_cache_dir();
|
||||||
|
if legacy.is_dir() {
|
||||||
|
let _ = std::fs::create_dir_all(&new_dir);
|
||||||
|
if let Ok(entries) = std::fs::read_dir(&legacy) {
|
||||||
|
for entry in entries.flatten() {
|
||||||
|
let dest = new_dir.join(entry.file_name());
|
||||||
|
let _ = std::fs::copy(entry.path(), &dest);
|
||||||
|
#[cfg(unix)]
|
||||||
|
{
|
||||||
|
use std::os::unix::fs::PermissionsExt;
|
||||||
|
let _ = std::fs::set_permissions(&dest, std::fs::Permissions::from_mode(0o755));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
new_dir
|
||||||
|
}
|
||||||
|
|
||||||
/// Extract an embedded binary to the cache directory if not already present.
|
/// Extract an embedded binary to the cache directory if not already present.
|
||||||
fn extract_embedded(data: &[u8], name: &str) -> Result<PathBuf> {
|
fn extract_embedded(data: &[u8], name: &str) -> Result<PathBuf> {
|
||||||
let dir = cache_dir();
|
let dir = cache_dir();
|
||||||
@@ -94,8 +120,8 @@ mod tests {
|
|||||||
fn cache_dir_ends_with_sunbeam_bin() {
|
fn cache_dir_ends_with_sunbeam_bin() {
|
||||||
let dir = cache_dir();
|
let dir = cache_dir();
|
||||||
assert!(
|
assert!(
|
||||||
dir.ends_with("sunbeam/bin"),
|
dir.ends_with(".sunbeam/bin"),
|
||||||
"cache_dir() should end with sunbeam/bin, got: {}",
|
"cache_dir() should end with .sunbeam/bin, got: {}",
|
||||||
dir.display()
|
dir.display()
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -56,10 +56,7 @@ fn forge_url() -> String {
|
|||||||
|
|
||||||
/// Cache file location for background update checks.
|
/// Cache file location for background update checks.
|
||||||
fn update_cache_path() -> PathBuf {
|
fn update_cache_path() -> PathBuf {
|
||||||
dirs::data_dir()
|
crate::config::sunbeam_dir().join("update-check.json")
|
||||||
.unwrap_or_else(|| dirs::home_dir().unwrap_or_else(|| PathBuf::from(".")).join(".local/share"))
|
|
||||||
.join("sunbeam")
|
|
||||||
.join("update-check.json")
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
|
|||||||
71
src/workflows/bootstrap/definition.rs
Normal file
71
src/workflows/bootstrap/definition.rs
Normal file
@@ -0,0 +1,71 @@
|
|||||||
|
//! Bootstrap workflow definition — Gitea admin setup sequence.
|
||||||
|
|
||||||
|
use wfe_core::builder::WorkflowBuilder;
|
||||||
|
use wfe_core::models::WorkflowDefinition;
|
||||||
|
|
||||||
|
use super::steps;
|
||||||
|
|
||||||
|
/// Build the bootstrap workflow definition.
|
||||||
|
///
|
||||||
|
/// Steps execute sequentially:
|
||||||
|
/// 1. Get admin password from K8s secret
|
||||||
|
/// 2. Wait for Gitea pod to be ready
|
||||||
|
/// 3. Set admin password
|
||||||
|
/// 4. Mark admin as private
|
||||||
|
/// 5. Create orgs (studio, internal)
|
||||||
|
/// 6. Configure OIDC auth source
|
||||||
|
/// 7. Print result
|
||||||
|
pub fn build() -> WorkflowDefinition {
|
||||||
|
WorkflowBuilder::<serde_json::Value>::new()
|
||||||
|
.start_with::<steps::GetAdminPassword>()
|
||||||
|
.name("get-admin-password")
|
||||||
|
.then::<steps::WaitForGiteaPod>()
|
||||||
|
.name("wait-for-gitea-pod")
|
||||||
|
.then::<steps::SetAdminPassword>()
|
||||||
|
.name("set-admin-password")
|
||||||
|
.then::<steps::MarkAdminPrivate>()
|
||||||
|
.name("mark-admin-private")
|
||||||
|
.then::<steps::CreateOrgs>()
|
||||||
|
.name("create-orgs")
|
||||||
|
.then::<steps::ConfigureOIDC>()
|
||||||
|
.name("configure-oidc")
|
||||||
|
.then::<steps::PrintBootstrapResult>()
|
||||||
|
.name("print-bootstrap-result")
|
||||||
|
.end_workflow()
|
||||||
|
.build("bootstrap", 1)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_build_returns_valid_definition() {
|
||||||
|
let def = build();
|
||||||
|
assert_eq!(def.id, "bootstrap");
|
||||||
|
assert_eq!(def.version, 1);
|
||||||
|
assert_eq!(def.steps.len(), 7);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_build_step_names() {
|
||||||
|
let def = build();
|
||||||
|
let names: Vec<Option<&str>> = def
|
||||||
|
.steps
|
||||||
|
.iter()
|
||||||
|
.map(|s| s.name.as_deref())
|
||||||
|
.collect();
|
||||||
|
assert_eq!(
|
||||||
|
names,
|
||||||
|
vec![
|
||||||
|
Some("get-admin-password"),
|
||||||
|
Some("wait-for-gitea-pod"),
|
||||||
|
Some("set-admin-password"),
|
||||||
|
Some("mark-admin-private"),
|
||||||
|
Some("create-orgs"),
|
||||||
|
Some("configure-oidc"),
|
||||||
|
Some("print-bootstrap-result"),
|
||||||
|
]
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
37
src/workflows/bootstrap/mod.rs
Normal file
37
src/workflows/bootstrap/mod.rs
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
//! Bootstrap workflow — Gitea admin setup, org creation, OIDC configuration.
|
||||||
|
|
||||||
|
pub mod definition;
|
||||||
|
pub mod steps;
|
||||||
|
|
||||||
|
use crate::output;
|
||||||
|
|
||||||
|
/// Register all bootstrap workflow steps and the workflow definition with a host.
|
||||||
|
pub async fn register(host: &wfe::WorkflowHost) {
|
||||||
|
host.register_step::<steps::GetAdminPassword>().await;
|
||||||
|
host.register_step::<steps::WaitForGiteaPod>().await;
|
||||||
|
host.register_step::<steps::SetAdminPassword>().await;
|
||||||
|
host.register_step::<steps::MarkAdminPrivate>().await;
|
||||||
|
host.register_step::<steps::CreateOrgs>().await;
|
||||||
|
host.register_step::<steps::ConfigureOIDC>().await;
|
||||||
|
host.register_step::<steps::PrintBootstrapResult>().await;
|
||||||
|
|
||||||
|
host.register_workflow_definition(definition::build()).await;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Print a summary of the completed bootstrap workflow.
|
||||||
|
pub fn print_summary(instance: &wfe_core::models::WorkflowInstance) {
|
||||||
|
output::step("Bootstrap workflow summary:");
|
||||||
|
for ep in &instance.execution_pointers {
|
||||||
|
let fallback = format!("step-{}", ep.step_id);
|
||||||
|
let name = ep.step_name.as_deref().unwrap_or(&fallback);
|
||||||
|
let status = format!("{:?}", ep.status);
|
||||||
|
let duration = match (ep.start_time, ep.end_time) {
|
||||||
|
(Some(start), Some(end)) => {
|
||||||
|
let d = end - start;
|
||||||
|
format!("{}ms", d.num_milliseconds())
|
||||||
|
}
|
||||||
|
_ => "-".to_string(),
|
||||||
|
};
|
||||||
|
output::ok(&format!(" {name:<40} {status:<12} {duration}"));
|
||||||
|
}
|
||||||
|
}
|
||||||
453
src/workflows/bootstrap/steps/bootstrap.rs
Normal file
453
src/workflows/bootstrap/steps/bootstrap.rs
Normal file
@@ -0,0 +1,453 @@
|
|||||||
|
//! Steps for the bootstrap workflow — Gitea admin setup, org creation, OIDC.
|
||||||
|
|
||||||
|
use wfe_core::models::ExecutionResult;
|
||||||
|
use wfe_core::traits::{StepBody, StepExecutionContext};
|
||||||
|
|
||||||
|
use crate::kube as k;
|
||||||
|
use crate::output::{ok, step, warn};
|
||||||
|
use crate::workflows::data::BootstrapData;
|
||||||
|
|
||||||
|
const GITEA_ADMIN_USER: &str = "gitea_admin";
|
||||||
|
const GITEA_ADMIN_EMAIL: &str = "gitea@local.domain";
|
||||||
|
|
||||||
|
fn load_data(ctx: &StepExecutionContext<'_>) -> wfe_core::Result<BootstrapData> {
|
||||||
|
serde_json::from_value(ctx.workflow.data.clone())
|
||||||
|
.map_err(|e| wfe_core::WfeError::StepExecution(e.to_string()))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn step_err(msg: impl Into<String>) -> wfe_core::WfeError {
|
||||||
|
wfe_core::WfeError::StepExecution(msg.into())
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── GetAdminPassword ───────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
/// Retrieve the Gitea admin password from the K8s secret.
|
||||||
|
#[derive(Default)]
|
||||||
|
pub struct GetAdminPassword;
|
||||||
|
|
||||||
|
#[async_trait::async_trait]
|
||||||
|
impl StepBody for GetAdminPassword {
|
||||||
|
async fn run(
|
||||||
|
&mut self,
|
||||||
|
ctx: &StepExecutionContext<'_>,
|
||||||
|
) -> wfe_core::Result<ExecutionResult> {
|
||||||
|
let data = load_data(ctx)?;
|
||||||
|
let step_ctx = data.ctx.as_ref()
|
||||||
|
.ok_or_else(|| step_err("missing __ctx in workflow data"))?;
|
||||||
|
|
||||||
|
k::set_context(&step_ctx.kube_context, &step_ctx.ssh_host);
|
||||||
|
|
||||||
|
let pass = k::kube_get_secret_field("devtools", "gitea-admin-credentials", "password")
|
||||||
|
.await
|
||||||
|
.unwrap_or_default();
|
||||||
|
|
||||||
|
if pass.is_empty() {
|
||||||
|
warn("gitea-admin-credentials password not found -- cannot bootstrap.");
|
||||||
|
return Err(step_err("gitea-admin-credentials password not found"));
|
||||||
|
}
|
||||||
|
|
||||||
|
let domain = k::get_domain().await
|
||||||
|
.map_err(|e| step_err(e.to_string()))?;
|
||||||
|
|
||||||
|
let mut result = ExecutionResult::next();
|
||||||
|
result.output_data = Some(serde_json::json!({
|
||||||
|
"gitea_admin_pass": pass,
|
||||||
|
"domain": domain,
|
||||||
|
}));
|
||||||
|
Ok(result)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── WaitForGiteaPod ────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
/// Wait for a Running + Ready Gitea pod (up to 3 minutes).
|
||||||
|
#[derive(Default)]
|
||||||
|
pub struct WaitForGiteaPod;
|
||||||
|
|
||||||
|
#[async_trait::async_trait]
|
||||||
|
impl StepBody for WaitForGiteaPod {
|
||||||
|
async fn run(
|
||||||
|
&mut self,
|
||||||
|
_ctx: &StepExecutionContext<'_>,
|
||||||
|
) -> wfe_core::Result<ExecutionResult> {
|
||||||
|
step("Waiting for Gitea pod...");
|
||||||
|
|
||||||
|
let client = k::get_client().await.map_err(|e| step_err(e.to_string()))?;
|
||||||
|
let pods: kube::Api<k8s_openapi::api::core::v1::Pod> =
|
||||||
|
kube::Api::namespaced(client.clone(), "devtools");
|
||||||
|
|
||||||
|
for _ in 0..60 {
|
||||||
|
let lp = kube::api::ListParams::default().labels("app.kubernetes.io/name=gitea");
|
||||||
|
if let Ok(pod_list) = pods.list(&lp).await {
|
||||||
|
for pod in &pod_list.items {
|
||||||
|
let phase = pod.status.as_ref()
|
||||||
|
.and_then(|s| s.phase.as_deref())
|
||||||
|
.unwrap_or("");
|
||||||
|
if phase != "Running" {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
let ready = pod.status.as_ref()
|
||||||
|
.and_then(|s| s.container_statuses.as_ref())
|
||||||
|
.and_then(|cs| cs.first())
|
||||||
|
.map(|c| c.ready)
|
||||||
|
.unwrap_or(false);
|
||||||
|
if ready {
|
||||||
|
let name = pod.metadata.name.as_deref().unwrap_or("").to_string();
|
||||||
|
if !name.is_empty() {
|
||||||
|
ok(&format!("Gitea pod ready: {name}"));
|
||||||
|
let mut result = ExecutionResult::next();
|
||||||
|
result.output_data = Some(serde_json::json!({ "gitea_pod": name }));
|
||||||
|
return Ok(result);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
tokio::time::sleep(std::time::Duration::from_secs(3)).await;
|
||||||
|
}
|
||||||
|
|
||||||
|
warn("Gitea pod not ready after 3 min -- skipping bootstrap.");
|
||||||
|
Err(step_err("Gitea pod not ready after 3 minutes"))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── SetAdminPassword ───────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
/// Set the Gitea admin password via CLI.
|
||||||
|
#[derive(Default)]
|
||||||
|
pub struct SetAdminPassword;
|
||||||
|
|
||||||
|
#[async_trait::async_trait]
|
||||||
|
impl StepBody for SetAdminPassword {
|
||||||
|
async fn run(
|
||||||
|
&mut self,
|
||||||
|
ctx: &StepExecutionContext<'_>,
|
||||||
|
) -> wfe_core::Result<ExecutionResult> {
|
||||||
|
let data = load_data(ctx)?;
|
||||||
|
let pod = data.gitea_pod.as_deref()
|
||||||
|
.ok_or_else(|| step_err("gitea_pod not set"))?;
|
||||||
|
let password = data.gitea_admin_pass.as_deref()
|
||||||
|
.ok_or_else(|| step_err("gitea_admin_pass not set"))?;
|
||||||
|
|
||||||
|
let (code, output) = k::kube_exec(
|
||||||
|
"devtools",
|
||||||
|
pod,
|
||||||
|
&[
|
||||||
|
"gitea", "admin", "user", "change-password",
|
||||||
|
"--username", GITEA_ADMIN_USER,
|
||||||
|
"--password", password,
|
||||||
|
"--must-change-password=false",
|
||||||
|
],
|
||||||
|
Some("gitea"),
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.map_err(|e| step_err(e.to_string()))?;
|
||||||
|
|
||||||
|
if code == 0 || output.to_lowercase().contains("password") {
|
||||||
|
ok(&format!("Admin '{GITEA_ADMIN_USER}' password set."));
|
||||||
|
} else {
|
||||||
|
warn(&format!("change-password: {output}"));
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(ExecutionResult::next())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── MarkAdminPrivate ───────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
/// Mark the admin account as private via API.
|
||||||
|
#[derive(Default)]
|
||||||
|
pub struct MarkAdminPrivate;
|
||||||
|
|
||||||
|
#[async_trait::async_trait]
|
||||||
|
impl StepBody for MarkAdminPrivate {
|
||||||
|
async fn run(
|
||||||
|
&mut self,
|
||||||
|
ctx: &StepExecutionContext<'_>,
|
||||||
|
) -> wfe_core::Result<ExecutionResult> {
|
||||||
|
let data = load_data(ctx)?;
|
||||||
|
let pod = data.gitea_pod.as_deref()
|
||||||
|
.ok_or_else(|| step_err("gitea_pod not set"))?;
|
||||||
|
let password = data.gitea_admin_pass.as_deref()
|
||||||
|
.ok_or_else(|| step_err("gitea_admin_pass not set"))?;
|
||||||
|
|
||||||
|
let body = serde_json::json!({
|
||||||
|
"source_id": 0,
|
||||||
|
"login_name": GITEA_ADMIN_USER,
|
||||||
|
"email": GITEA_ADMIN_EMAIL,
|
||||||
|
"visibility": "private",
|
||||||
|
});
|
||||||
|
|
||||||
|
let result = gitea_api(
|
||||||
|
pod, "PATCH",
|
||||||
|
&format!("/admin/users/{GITEA_ADMIN_USER}"),
|
||||||
|
password,
|
||||||
|
Some(&body),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
if result.get("login").and_then(|v| v.as_str()) == Some(GITEA_ADMIN_USER) {
|
||||||
|
ok(&format!("Admin '{GITEA_ADMIN_USER}' marked as private."));
|
||||||
|
} else {
|
||||||
|
warn(&format!("Could not set admin visibility: {result}"));
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(ExecutionResult::next())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── CreateOrgs ─────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
/// Create the studio and internal organizations.
|
||||||
|
#[derive(Default)]
|
||||||
|
pub struct CreateOrgs;
|
||||||
|
|
||||||
|
#[async_trait::async_trait]
|
||||||
|
impl StepBody for CreateOrgs {
|
||||||
|
async fn run(
|
||||||
|
&mut self,
|
||||||
|
ctx: &StepExecutionContext<'_>,
|
||||||
|
) -> wfe_core::Result<ExecutionResult> {
|
||||||
|
let data = load_data(ctx)?;
|
||||||
|
let pod = data.gitea_pod.as_deref()
|
||||||
|
.ok_or_else(|| step_err("gitea_pod not set"))?;
|
||||||
|
let password = data.gitea_admin_pass.as_deref()
|
||||||
|
.ok_or_else(|| step_err("gitea_admin_pass not set"))?;
|
||||||
|
|
||||||
|
let orgs = [
|
||||||
|
("studio", "public", "Public source code"),
|
||||||
|
("internal", "private", "Internal tools and services"),
|
||||||
|
];
|
||||||
|
|
||||||
|
for (org_name, visibility, desc) in &orgs {
|
||||||
|
let body = serde_json::json!({
|
||||||
|
"username": org_name,
|
||||||
|
"visibility": visibility,
|
||||||
|
"description": desc,
|
||||||
|
});
|
||||||
|
|
||||||
|
let result = gitea_api(pod, "POST", "/orgs", password, Some(&body)).await?;
|
||||||
|
|
||||||
|
if result.get("id").is_some() {
|
||||||
|
ok(&format!("Created org '{org_name}'."));
|
||||||
|
} else if result
|
||||||
|
.get("message")
|
||||||
|
.and_then(|v| v.as_str())
|
||||||
|
.unwrap_or("")
|
||||||
|
.to_lowercase()
|
||||||
|
.contains("already")
|
||||||
|
{
|
||||||
|
ok(&format!("Org '{org_name}' already exists."));
|
||||||
|
} else {
|
||||||
|
let msg = result
|
||||||
|
.get("message")
|
||||||
|
.and_then(|v| v.as_str())
|
||||||
|
.map(|s| s.to_string())
|
||||||
|
.unwrap_or_else(|| format!("{result}"));
|
||||||
|
warn(&format!("Org '{org_name}': {msg}"));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(ExecutionResult::next())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── ConfigureOIDC ──────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
/// Configure Hydra as the OIDC authentication source.
///
/// Idempotent three-way behavior:
/// 1. A source named exactly "Sunbeam" exists → nothing to do.
/// 2. A legacy source ("Sunbeam Auth", or a "Sunbeam*" OAuth2 source)
///    exists → rename it in place.
/// 3. Otherwise → create a fresh OIDC source from the `oidc-gitea` secret.
#[derive(Default)]
pub struct ConfigureOIDC;

#[async_trait::async_trait]
impl StepBody for ConfigureOIDC {
    async fn run(
        &mut self,
        ctx: &StepExecutionContext<'_>,
    ) -> wfe_core::Result<ExecutionResult> {
        let data = load_data(ctx)?;
        let pod = data.gitea_pod.as_deref()
            .ok_or_else(|| step_err("gitea_pod not set"))?;

        // List existing auth sources. Exit code is deliberately ignored;
        // an empty/garbled listing simply falls through to "create new".
        let (_, auth_list_output) = k::kube_exec(
            "devtools", pod, &["gitea", "admin", "auth", "list"], Some("gitea"),
        )
        .await
        .map_err(|e| step_err(e.to_string()))?;

        let mut existing_id: Option<String> = None;
        let mut exact_ok = false;

        // Output is a tab-separated table; skip(1) drops the header row.
        // NOTE(review): assumes `gitea admin auth list` emits TAB-delimited
        // columns (id, name, type, ...) — confirm against the Gitea version
        // deployed.
        for line in auth_list_output.lines().skip(1) {
            let parts: Vec<&str> = line.split('\t').collect();
            if parts.len() < 2 {
                continue;
            }
            let src_id = parts[0].trim();
            let src_name = parts[1].trim();

            // Case 1: exact match — stop scanning entirely.
            if src_name == "Sunbeam" {
                exact_ok = true;
                break;
            }

            let src_type = if parts.len() > 2 { parts[2].trim() } else { "" };
            // Case 2 candidate: legacy naming. If several match, the last
            // row wins (existing_id is overwritten, loop does not break).
            if src_name == "Sunbeam Auth"
                || (src_name.starts_with("Sunbeam") && src_type == "OAuth2")
            {
                existing_id = Some(src_id.to_string());
            }
        }

        if exact_ok {
            ok("OIDC auth source 'Sunbeam' already present.");
            return Ok(ExecutionResult::next());
        }

        // Case 2: rename the legacy source instead of creating a duplicate.
        if let Some(eid) = existing_id {
            let (code, stderr) = k::kube_exec(
                "devtools",
                pod,
                &[
                    "gitea", "admin", "auth", "update-oauth",
                    "--id", &eid,
                    "--name", "Sunbeam",
                ],
                Some("gitea"),
            )
            .await
            .map_err(|e| step_err(e.to_string()))?;

            // Rename failure is non-fatal: warn and let the workflow proceed.
            if code == 0 {
                ok(&format!("Renamed OIDC auth source (id={eid}) to 'Sunbeam'."));
            } else {
                warn(&format!("Rename failed: {stderr}"));
            }
            return Ok(ExecutionResult::next());
        }

        // Create new OIDC auth source
        // Client credentials come from the k8s secret provisioned alongside
        // the lasuite namespace.
        let oidc_id = k::kube_get_secret_field("lasuite", "oidc-gitea", "CLIENT_ID").await;
        let oidc_secret = k::kube_get_secret_field("lasuite", "oidc-gitea", "CLIENT_SECRET").await;

        match (oidc_id, oidc_secret) {
            (Ok(oidc_id), Ok(oidc_sec)) => {
                // In-cluster Hydra public endpoint; Gitea fetches the rest of
                // the OIDC config from this discovery document.
                let discover_url =
                    "http://hydra-public.ory.svc.cluster.local:4444/.well-known/openid-configuration";

                let (code, stderr) = k::kube_exec(
                    "devtools",
                    pod,
                    &[
                        "gitea", "admin", "auth", "add-oauth",
                        "--name", "Sunbeam",
                        "--provider", "openidConnect",
                        "--key", &oidc_id,
                        "--secret", &oidc_sec,
                        "--auto-discover-url", discover_url,
                        "--scopes", "openid",
                        "--scopes", "email",
                        "--scopes", "profile",
                    ],
                    Some("gitea"),
                )
                .await
                .map_err(|e| step_err(e.to_string()))?;

                if code == 0 {
                    ok("OIDC auth source 'Sunbeam' configured.");
                } else {
                    warn(&format!("OIDC auth source config failed: {stderr}"));
                }
            }
            // Either secret field missing: degrade gracefully, don't fail
            // the whole bootstrap workflow.
            _ => {
                warn("oidc-gitea secret not found -- OIDC auth source not configured.");
            }
        }

        Ok(ExecutionResult::next())
    }
}
|
||||||
|
|
||||||
|
// ── PrintBootstrapResult ───────────────────────────────────────────────────
|
||||||
|
|
||||||
|
/// Print the final bootstrap result.
|
||||||
|
#[derive(Default)]
|
||||||
|
pub struct PrintBootstrapResult;
|
||||||
|
|
||||||
|
#[async_trait::async_trait]
|
||||||
|
impl StepBody for PrintBootstrapResult {
|
||||||
|
async fn run(
|
||||||
|
&mut self,
|
||||||
|
ctx: &StepExecutionContext<'_>,
|
||||||
|
) -> wfe_core::Result<ExecutionResult> {
|
||||||
|
let data = load_data(ctx)?;
|
||||||
|
let domain = data.domain.as_deref().unwrap_or("unknown");
|
||||||
|
ok(&format!(
|
||||||
|
"Gitea ready -- https://src.{domain} ({GITEA_ADMIN_USER} / <from openbao>)"
|
||||||
|
));
|
||||||
|
Ok(ExecutionResult::next())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── Helpers ────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
/// Call Gitea API via kubectl curl inside the pod.
///
/// Runs `curl` inside the `gitea` container of `pod` against the pod-local
/// API (`localhost:3000`), authenticated as the admin user via basic auth.
///
/// Lenient by design: the curl exit code is ignored and a non-JSON (or
/// empty) response is returned as an empty JSON object, so callers decide
/// success by inspecting the returned value.
///
/// # Errors
/// Only serialization failures of `data` and `kube_exec` transport errors
/// are surfaced as step errors.
async fn gitea_api(
    pod: &str,
    method: &str,
    path: &str,
    password: &str,
    data: Option<&serde_json::Value>,
) -> wfe_core::Result<serde_json::Value> {
    let url = format!("http://localhost:3000/api/v1{path}");
    // NOTE(review): the password rides on curl's argv inside the pod, so it
    // is briefly visible in the container's process list — acceptable for a
    // bootstrap step, but worth confirming for production threat models.
    let auth = format!("{GITEA_ADMIN_USER}:{password}");

    let mut args = vec![
        "curl", "-s", "-X", method, &url, "-H", "Content-Type: application/json", "-u", &auth,
    ];

    // data_str must outlive `args` (args holds &str into it), hence the
    // declaration outside the if-let.
    let data_str;
    if let Some(d) = data {
        data_str = serde_json::to_string(d)
            .map_err(|e| step_err(e.to_string()))?;
        args.push("-d");
        args.push(&data_str);
    }

    // Exit code intentionally discarded (see doc comment).
    let (_, stdout) = k::kube_exec("devtools", pod, &args, Some("gitea"))
        .await
        .map_err(|e| step_err(e.to_string()))?;

    // Unparseable output collapses to `{}` rather than an error.
    Ok(serde_json::from_str(&stdout).unwrap_or(serde_json::Value::Object(Default::default())))
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;

    // Smoke tests: each step type must be constructible via Default, since
    // the workflow engine instantiates steps through the Default impl.

    #[test]
    fn get_admin_password_is_default() { let _ = GetAdminPassword::default(); }

    #[test]
    fn wait_for_gitea_pod_is_default() { let _ = WaitForGiteaPod::default(); }

    #[test]
    fn set_admin_password_is_default() { let _ = SetAdminPassword::default(); }

    #[test]
    fn mark_admin_private_is_default() { let _ = MarkAdminPrivate::default(); }

    #[test]
    fn create_orgs_is_default() { let _ = CreateOrgs::default(); }

    #[test]
    fn configure_oidc_is_default() { let _ = ConfigureOIDC::default(); }

    #[test]
    fn print_bootstrap_result_is_default() { let _ = PrintBootstrapResult::default(); }

    // Pin the admin identity constants: other systems (OpenBao seeding,
    // OIDC mapping) depend on these exact values.
    #[test]
    fn test_constants() {
        assert_eq!(GITEA_ADMIN_USER, "gitea_admin");
        assert_eq!(GITEA_ADMIN_EMAIL, "gitea@local.domain");
    }
}
|
||||||
13
src/workflows/bootstrap/steps/mod.rs
Normal file
13
src/workflows/bootstrap/steps/mod.rs
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
//! Bootstrap workflow steps — Gitea admin setup, org creation, OIDC configuration.
|
||||||
|
|
||||||
|
mod bootstrap;
|
||||||
|
|
||||||
|
pub use bootstrap::{
|
||||||
|
GetAdminPassword,
|
||||||
|
WaitForGiteaPod,
|
||||||
|
SetAdminPassword,
|
||||||
|
MarkAdminPrivate,
|
||||||
|
CreateOrgs,
|
||||||
|
ConfigureOIDC,
|
||||||
|
PrintBootstrapResult,
|
||||||
|
};
|
||||||
416
src/workflows/cmd.rs
Normal file
416
src/workflows/cmd.rs
Normal file
@@ -0,0 +1,416 @@
|
|||||||
|
use clap::Subcommand;
|
||||||
|
use wfe_core::traits::WorkflowRepository;
|
||||||
|
|
||||||
|
use crate::error::{Result, SunbeamError};
|
||||||
|
use crate::output;
|
||||||
|
|
||||||
|
use super::host;
|
||||||
|
|
||||||
|
#[derive(Subcommand, Debug)]
// CLI surface for `sunbeam workflow <action>`. The `///` doc comments below
// double as clap-generated --help text, so they are user-facing strings.
pub enum WorkflowAction {
    /// List workflow instances.
    List {
        /// Filter by status (runnable, complete, terminated, suspended).
        // Empty string means "no filter".
        #[arg(long, default_value = "")]
        status: String,
    },
    /// Show status of a workflow instance.
    Status {
        /// Workflow instance ID.
        id: String,
    },
    /// Retry a failed workflow from its last checkpoint.
    Retry {
        /// Workflow instance ID.
        id: String,
    },
    /// Cancel a running workflow.
    Cancel {
        /// Workflow instance ID.
        id: String,
    },
    /// Run a YAML-defined workflow.
    Run {
        /// Path to workflow YAML file (default: ./workflows.yaml).
        // Positional; empty default lets run_workflow supply its own default.
        #[arg(default_value = "")]
        file: String,
    },
}
|
||||||
|
|
||||||
|
/// Dispatch a `sunbeam workflow <action>` command.
|
||||||
|
pub async fn dispatch(context_name: &str, action: WorkflowAction) -> Result<()> {
|
||||||
|
if let WorkflowAction::Run { file } = action {
|
||||||
|
return run_workflow(&file).await;
|
||||||
|
}
|
||||||
|
|
||||||
|
let h = host::create_host(context_name).await?;
|
||||||
|
let result = dispatch_with_host(&h, action).await;
|
||||||
|
host::shutdown_host(h).await;
|
||||||
|
result
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Inner dispatch that operates on an already-created host. Testable.
///
/// `Run` must be intercepted by [`dispatch`] before reaching here — it does
/// not need a host, hence the `unreachable!` arm.
pub async fn dispatch_with_host(
    h: &wfe::WorkflowHost,
    action: WorkflowAction,
) -> Result<()> {
    match action {
        WorkflowAction::List { status } => list_workflows(h, &status).await,
        WorkflowAction::Status { id } => show_workflow_status(h, &id).await,
        WorkflowAction::Retry { id } => retry_workflow(h, &id).await,
        WorkflowAction::Cancel { id } => cancel_workflow(h, &id).await,
        WorkflowAction::Run { .. } => unreachable!("handled above"),
    }
}
|
||||||
|
|
||||||
|
/// List workflow instances.
|
||||||
|
pub async fn list_workflows(h: &wfe::WorkflowHost, _status_filter: &str) -> Result<()> {
|
||||||
|
let now = chrono::Utc::now();
|
||||||
|
let ids = h
|
||||||
|
.persistence()
|
||||||
|
.get_runnable_instances(now)
|
||||||
|
.await
|
||||||
|
.map_err(|e| SunbeamError::Other(format!("query workflows: {e}")))?;
|
||||||
|
|
||||||
|
if ids.is_empty() {
|
||||||
|
output::ok("No workflow instances found.");
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
let instances = h
|
||||||
|
.persistence()
|
||||||
|
.get_workflow_instances(&ids)
|
||||||
|
.await
|
||||||
|
.map_err(|e| SunbeamError::Other(format!("load workflows: {e}")))?;
|
||||||
|
|
||||||
|
let rows: Vec<Vec<String>> = instances
|
||||||
|
.iter()
|
||||||
|
.map(|wf| {
|
||||||
|
vec![
|
||||||
|
wf.id.clone(),
|
||||||
|
wf.workflow_definition_id.clone(),
|
||||||
|
format!("{:?}", wf.status),
|
||||||
|
]
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
println!("{}", output::table(&rows, &["ID", "DEFINITION", "STATUS"]));
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Show status of a single workflow instance.
///
/// Prints a header (definition, status, timestamps) followed by a table of
/// execution pointers (one row per step attempt). A missing instance is a
/// warning, not an error — the function still returns `Ok(())`.
pub async fn show_workflow_status(h: &wfe::WorkflowHost, id: &str) -> Result<()> {
    match h.get_workflow(id).await {
        Ok(wf) => {
            output::ok(&format!("Workflow: {}", wf.workflow_definition_id));
            output::ok(&format!("Status: {:?}", wf.status));
            output::ok(&format!("Created: {}", wf.create_time));
            // Complete time only exists for finished workflows.
            if let Some(ct) = wf.complete_time {
                output::ok(&format!("Completed: {ct}"));
            }

            println!();
            output::step("Execution pointers:");
            let rows: Vec<Vec<String>> = wf
                .execution_pointers
                .iter()
                .map(|ep| {
                    vec![
                        // Unnamed steps fall back to a synthetic "step-<id>".
                        ep.step_name
                            .clone()
                            .unwrap_or_else(|| format!("step-{}", ep.step_id)),
                        format!("{:?}", ep.status),
                        // Start/end may be unset for pending steps; render
                        // as empty cells.
                        ep.start_time
                            .map(|t| t.to_string())
                            .unwrap_or_default(),
                        ep.end_time.map(|t| t.to_string()).unwrap_or_default(),
                        format!("{}", ep.retry_count),
                    ]
                })
                .collect();

            println!(
                "{}",
                output::table(&rows, &["STEP", "STATUS", "STARTED", "ENDED", "RETRIES"])
            );
        }
        Err(e) => {
            output::warn(&format!("Workflow instance '{id}' not found: {e}"));
        }
    }

    Ok(())
}
|
||||||
|
|
||||||
|
/// Resume a suspended/failed workflow.
|
||||||
|
pub async fn retry_workflow(h: &wfe::WorkflowHost, id: &str) -> Result<()> {
|
||||||
|
h.resume_workflow(id)
|
||||||
|
.await
|
||||||
|
.map_err(|e| SunbeamError::Other(format!("resume workflow: {e}")))?;
|
||||||
|
output::ok(&format!("Workflow '{id}' resumed."));
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Terminate a running workflow.
|
||||||
|
pub async fn cancel_workflow(h: &wfe::WorkflowHost, id: &str) -> Result<()> {
|
||||||
|
h.terminate_workflow(id)
|
||||||
|
.await
|
||||||
|
.map_err(|e| SunbeamError::Other(format!("terminate workflow: {e}")))?;
|
||||||
|
output::ok(&format!("Workflow '{id}' cancelled."));
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn run_workflow(_file: &str) -> Result<()> {
|
||||||
|
Err(SunbeamError::Other(
|
||||||
|
"sunbeam workflow run is not yet implemented".to_string(),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;
    use std::time::Duration;
    use wfe::run_workflow_sync;
    use wfe_core::builder::WorkflowBuilder;
    use wfe_core::models::{ExecutionResult, WorkflowStatus};
    use wfe_core::traits::{StepBody, StepExecutionContext};

    // Minimal step that always advances; lets tests build real workflow
    // definitions without side effects.
    #[derive(Default)]
    struct NoOp;
    #[async_trait::async_trait]
    impl StepBody for NoOp {
        async fn run(
            &mut self,
            _ctx: &StepExecutionContext<'_>,
        ) -> wfe_core::Result<ExecutionResult> {
            Ok(ExecutionResult::next())
        }
    }

    // Creates a test host, registers a single-step definition, runs one
    // instance to completion, and returns (host, instance id).
    async fn setup_host_with_workflow() -> (wfe::WorkflowHost, String) {
        let h = host::create_test_host().await.unwrap();
        h.register_step::<NoOp>().await;

        let def = WorkflowBuilder::<serde_json::Value>::new()
            .start_with::<NoOp>()
            .name("test-step")
            .end_workflow()
            .build("test-def", 1);
        h.register_workflow_definition(def).await;

        let instance = run_workflow_sync(
            &h,
            "test-def",
            1,
            serde_json::json!({}),
            Duration::from_secs(5),
        )
        .await
        .unwrap();

        (h, instance.id)
    }

    #[tokio::test]
    async fn test_list_workflows_empty() {
        let h = host::create_test_host().await.unwrap();
        let result = list_workflows(&h, "").await;
        assert!(result.is_ok());
        h.stop().await;
    }

    // Missing instance is a warning, not an error (see show_workflow_status).
    #[tokio::test]
    async fn test_show_workflow_status_not_found() {
        let h = host::create_test_host().await.unwrap();
        let result = show_workflow_status(&h, "nonexistent-id").await;
        assert!(result.is_ok());
        h.stop().await;
    }

    #[tokio::test]
    async fn test_show_workflow_status_found() {
        let (h, id) = setup_host_with_workflow().await;
        let result = show_workflow_status(&h, &id).await;
        assert!(result.is_ok());
        h.stop().await;
    }

    // Exercises the execution-pointer table path with a two-step workflow.
    #[tokio::test]
    async fn test_show_status_with_step_details() {
        let h = host::create_test_host().await.unwrap();
        h.register_step::<NoOp>().await;

        let def = WorkflowBuilder::<serde_json::Value>::new()
            .start_with::<NoOp>()
            .name("step-alpha")
            .then::<NoOp>()
            .name("step-beta")
            .end_workflow()
            .build("multi-def", 1);
        h.register_workflow_definition(def).await;

        let instance = run_workflow_sync(
            &h,
            "multi-def",
            1,
            serde_json::json!({}),
            Duration::from_secs(5),
        )
        .await
        .unwrap();

        assert_eq!(instance.status, WorkflowStatus::Complete);
        let result = show_workflow_status(&h, &instance.id).await;
        assert!(result.is_ok());
        h.stop().await;
    }

    // Cancelling an already-completed workflow may be rejected by the
    // engine; the test only asserts it doesn't panic.
    #[tokio::test]
    async fn test_cancel_workflow_completed() {
        let (h, id) = setup_host_with_workflow().await;
        let result = cancel_workflow(&h, &id).await;
        drop(result);
        h.stop().await;
    }

    #[tokio::test]
    async fn test_retry_workflow_nonexistent() {
        let h = host::create_test_host().await.unwrap();
        let result = retry_workflow(&h, "does-not-exist").await;
        assert!(result.is_err());
        h.stop().await;
    }

    #[tokio::test]
    async fn test_cancel_workflow_nonexistent() {
        let h = host::create_test_host().await.unwrap();
        let result = cancel_workflow(&h, "does-not-exist").await;
        assert!(result.is_err());
        h.stop().await;
    }

    #[tokio::test]
    async fn test_run_workflow_not_implemented() {
        let result = run_workflow("test.yaml").await;
        assert!(result.is_err());
        assert!(result.unwrap_err().to_string().contains("not yet implemented"));
    }

    #[tokio::test]
    async fn test_dispatch_with_host_list() {
        let h = host::create_test_host().await.unwrap();
        let result = dispatch_with_host(
            &h,
            WorkflowAction::List { status: String::new() },
        )
        .await;
        assert!(result.is_ok());
        h.stop().await;
    }

    #[tokio::test]
    async fn test_dispatch_with_host_status() {
        let (h, id) = setup_host_with_workflow().await;
        let result = dispatch_with_host(
            &h,
            WorkflowAction::Status { id },
        )
        .await;
        assert!(result.is_ok());
        h.stop().await;
    }

    #[tokio::test]
    async fn test_dispatch_with_host_retry_nonexistent() {
        let h = host::create_test_host().await.unwrap();
        let result = dispatch_with_host(
            &h,
            WorkflowAction::Retry { id: "nope".to_string() },
        )
        .await;
        assert!(result.is_err());
        h.stop().await;
    }

    #[tokio::test]
    async fn test_retry_suspended_workflow() {
        let h = host::create_test_host().await.unwrap();
        h.register_step::<NoOp>().await;

        let def = WorkflowBuilder::<serde_json::Value>::new()
            .start_with::<NoOp>()
            .name("suspend-step")
            .end_workflow()
            .build("suspend-def", 1);
        h.register_workflow_definition(def).await;

        let id = h.start_workflow("suspend-def", 1, serde_json::json!({})).await.unwrap();
        // Give the engine a moment to pick the instance up before suspending.
        tokio::time::sleep(Duration::from_millis(100)).await;

        // Suspend it
        let _ = h.suspend_workflow(&id).await;

        // Resume should succeed
        let result = retry_workflow(&h, &id).await;
        assert!(result.is_ok());
        h.stop().await;
    }

    // A wait_for step with an event that never fires keeps the workflow
    // alive so cancellation hits a genuinely running instance.
    #[tokio::test]
    async fn test_cancel_running_workflow() {
        let h = host::create_test_host().await.unwrap();
        h.register_step::<NoOp>().await;

        let def = WorkflowBuilder::<serde_json::Value>::new()
            .start_with::<NoOp>()
            .name("cancel-step")
            .wait_for("never-event", "never-key")
            .name("waiting")
            .end_workflow()
            .build("cancel-def", 1);
        h.register_workflow_definition(def).await;

        let id = h.start_workflow("cancel-def", 1, serde_json::json!({})).await.unwrap();
        tokio::time::sleep(Duration::from_millis(100)).await;

        let result = cancel_workflow(&h, &id).await;
        assert!(result.is_ok());
        h.stop().await;
    }

    #[tokio::test]
    async fn test_dispatch_with_host_cancel() {
        let (h, id) = setup_host_with_workflow().await;
        let result = dispatch_with_host(
            &h,
            WorkflowAction::Cancel { id },
        )
        .await;
        drop(result);
        h.stop().await;
    }

    #[tokio::test]
    async fn test_list_workflows_with_runnable_instance() {
        use wfe_core::models::WorkflowInstance;

        let h = host::create_test_host().await.unwrap();

        // Manually persist a Runnable workflow so get_runnable_instances finds it
        let instance = WorkflowInstance::new(
            "manual-def",
            1,
            serde_json::json!({}),
        );

        h.persistence()
            .create_new_workflow(&instance)
            .await
            .unwrap();

        // Now list_workflows should hit the non-empty path
        let result = list_workflows(&h, "").await;
        assert!(result.is_ok());
        h.stop().await;
    }
}
|
||||||
281
src/workflows/data.rs
Normal file
281
src/workflows/data.rs
Normal file
@@ -0,0 +1,281 @@
|
|||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use std::collections::HashMap;
|
||||||
|
|
||||||
|
use super::StepContext;
|
||||||
|
|
||||||
|
/// Workflow data for the `seed` workflow.
///
/// Serialized as JSON into the workflow engine's persisted state; every
/// step deserializes, mutates, and re-saves it, so all fields must be
/// optional or defaultable for forward compatibility.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct SeedData {
    /// Shared CLI context (domain, kube context, etc.)
    #[serde(default, rename = "__ctx")]
    pub ctx: Option<StepContext>,

    // -- Phase 1: OpenBao init --
    // Pod name of the OpenBao instance, once discovered.
    pub ob_pod: Option<String>,
    // Local port used to reach OpenBao (presumably a port-forward — confirm).
    pub ob_port: Option<u16>,
    // Root token for OpenBao; sensitive — persisted in the workflow DB.
    pub root_token: Option<String>,
    // Whether the vault reported itself initialized / sealed.
    pub initialized: Option<bool>,
    pub sealed: Option<bool>,
    // When true, downstream seeding steps are skipped.
    pub skip_seed: bool,

    // -- Phase 2: KV seeding --
    /// Accumulated credential values keyed by "path/field".
    #[serde(default)]
    pub creds: HashMap<String, String>,
    /// KV paths that were modified and need writing.
    #[serde(default)]
    pub dirty_paths: Vec<String>,

    // -- Phase 4: PostgreSQL --
    // Pod name of the PostgreSQL instance.
    pub pg_pod: Option<String>,

    // -- Phase 6: Kratos admin --
    // Admin account recovery artifacts produced during Kratos setup.
    pub recovery_link: Option<String>,
    pub recovery_code: Option<String>,
    // DKIM public key generated for the mail setup (base64/PEM body).
    pub dkim_public_key: Option<String>,
    // Kratos identity ID of the created admin.
    pub admin_identity_id: Option<String>,
}
|
||||||
|
|
||||||
|
/// Workflow data for the `up` workflow.
///
/// Mirrors the vault/postgres fields of [`SeedData`] so the `up` workflow
/// can reuse the same seeding steps.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct UpData {
    // Shared CLI context; stored under "__ctx" like the other data types.
    #[serde(default, rename = "__ctx")]
    pub ctx: Option<StepContext>,
    // Target deployment domain (required, unlike BootstrapData::domain).
    pub domain: String,

    // -- Vault phase (reused from seed) --
    pub ob_pod: Option<String>,
    pub ob_port: Option<u16>,
    pub root_token: Option<String>,
    #[serde(default)]
    pub skip_seed: bool,
    // Credential values keyed by "path/field" (same scheme as SeedData).
    #[serde(default)]
    pub creds: HashMap<String, String>,
    // KV paths modified and pending a write.
    #[serde(default)]
    pub dirty_paths: Vec<String>,

    // -- Postgres phase --
    pub pg_pod: Option<String>,
}
|
||||||
|
|
||||||
|
/// Workflow data for the `verify` workflow.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct VerifyData {
    // Shared CLI context; stored under "__ctx".
    #[serde(default, rename = "__ctx")]
    pub ctx: Option<StepContext>,
    // OpenBao access details (see SeedData for field semantics).
    pub ob_pod: Option<String>,
    pub ob_port: Option<u16>,
    pub root_token: Option<String>,
    // Sentinel value written during verification to check round-tripping.
    pub test_value: Option<String>,
    // True once the verification confirmed the sentinel synced through.
    pub synced: bool,
}
|
||||||
|
|
||||||
|
/// Workflow data for the `bootstrap` workflow.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct BootstrapData {
    // Shared CLI context; stored under "__ctx".
    #[serde(default, rename = "__ctx")]
    pub ctx: Option<StepContext>,
    // Gitea pod name, set by the pod-discovery step.
    pub gitea_pod: Option<String>,
    // Gitea admin password fetched from OpenBao; sensitive — persisted in
    // the workflow DB alongside the rest of this struct.
    pub gitea_admin_pass: Option<String>,
    // Deployment domain used in the final summary output.
    pub domain: Option<String>,
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use super::*;
|
||||||
|
use std::collections::HashMap;
|
||||||
|
|
||||||
|
fn make_ctx() -> StepContext {
|
||||||
|
StepContext {
|
||||||
|
domain: "test.local".to_string(),
|
||||||
|
infra_dir: "/tmp".to_string(),
|
||||||
|
kube_context: "test".to_string(),
|
||||||
|
ssh_host: String::new(),
|
||||||
|
is_production: false,
|
||||||
|
acme_email: String::new(),
|
||||||
|
context_name: "default".to_string(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// -- SeedData --
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_seed_data_default() {
|
||||||
|
let d = SeedData::default();
|
||||||
|
assert!(d.ctx.is_none());
|
||||||
|
assert!(d.ob_pod.is_none());
|
||||||
|
assert!(!d.skip_seed);
|
||||||
|
assert!(d.creds.is_empty());
|
||||||
|
assert!(d.dirty_paths.is_empty());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_seed_data_serialization_roundtrip() {
|
||||||
|
let mut creds = HashMap::new();
|
||||||
|
creds.insert("hydra/system-secret".to_string(), "abc123".to_string());
|
||||||
|
creds.insert("kratos/cookie-secret".to_string(), "xyz789".to_string());
|
||||||
|
|
||||||
|
let d = SeedData {
|
||||||
|
ctx: Some(make_ctx()),
|
||||||
|
ob_pod: Some("openbao-0".to_string()),
|
||||||
|
ob_port: Some(8200),
|
||||||
|
root_token: Some("hvs.test".to_string()),
|
||||||
|
initialized: Some(true),
|
||||||
|
sealed: Some(false),
|
||||||
|
skip_seed: false,
|
||||||
|
creds,
|
||||||
|
dirty_paths: vec!["hydra".to_string()],
|
||||||
|
pg_pod: Some("postgres-1".to_string()),
|
||||||
|
recovery_link: None,
|
||||||
|
recovery_code: None,
|
||||||
|
dkim_public_key: Some("MIIBIjAN...".to_string()),
|
||||||
|
admin_identity_id: None,
|
||||||
|
};
|
||||||
|
|
||||||
|
let json = serde_json::to_value(&d).unwrap();
|
||||||
|
let back: SeedData = serde_json::from_value(json.clone()).unwrap();
|
||||||
|
|
||||||
|
assert_eq!(back.ob_pod.as_deref(), Some("openbao-0"));
|
||||||
|
assert_eq!(back.ob_port, Some(8200));
|
||||||
|
assert_eq!(back.root_token.as_deref(), Some("hvs.test"));
|
||||||
|
assert_eq!(back.initialized, Some(true));
|
||||||
|
assert_eq!(back.sealed, Some(false));
|
||||||
|
assert_eq!(back.creds.len(), 2);
|
||||||
|
assert_eq!(back.creds["hydra/system-secret"], "abc123");
|
||||||
|
assert_eq!(back.dirty_paths, vec!["hydra"]);
|
||||||
|
assert_eq!(back.pg_pod.as_deref(), Some("postgres-1"));
|
||||||
|
assert_eq!(back.dkim_public_key.as_deref(), Some("MIIBIjAN..."));
|
||||||
|
|
||||||
|
// __ctx rename should work
|
||||||
|
assert!(json.get("__ctx").is_some());
|
||||||
|
assert!(json.get("ctx").is_none());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_seed_data_ctx_rename() {
|
||||||
|
let d = SeedData {
|
||||||
|
ctx: Some(make_ctx()),
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
let json = serde_json::to_value(&d).unwrap();
|
||||||
|
// The field should be serialized as "__ctx", not "ctx"
|
||||||
|
assert!(json.get("__ctx").is_some());
|
||||||
|
assert!(json.get("ctx").is_none());
|
||||||
|
// And deserializes back
|
||||||
|
let back: SeedData = serde_json::from_value(json).unwrap();
|
||||||
|
assert!(back.ctx.is_some());
|
||||||
|
assert_eq!(back.ctx.unwrap().domain, "test.local");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_seed_data_from_json_without_ctx() {
|
||||||
|
// Workflow data might not have __ctx initially
|
||||||
|
let json = serde_json::json!({
|
||||||
|
"ob_pod": "openbao-0",
|
||||||
|
"skip_seed": true,
|
||||||
|
});
|
||||||
|
let d: SeedData = serde_json::from_value(json).unwrap();
|
||||||
|
assert!(d.ctx.is_none());
|
||||||
|
assert_eq!(d.ob_pod.as_deref(), Some("openbao-0"));
|
||||||
|
assert!(d.skip_seed);
|
||||||
|
}
|
||||||
|
|
||||||
|
// -- UpData --
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_up_data_default() {
|
||||||
|
let d = UpData::default();
|
||||||
|
assert!(d.ctx.is_none());
|
||||||
|
assert!(d.domain.is_empty());
|
||||||
|
assert!(!d.skip_seed);
|
||||||
|
assert!(d.ob_pod.is_none());
|
||||||
|
assert!(d.creds.is_empty());
|
||||||
|
assert!(d.pg_pod.is_none());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
fn test_up_data_roundtrip() {
    let original = UpData {
        ctx: Some(make_ctx()),
        domain: "sunbeam.pt".to_string(),
        ob_pod: Some("openbao-0".to_string()),
        ob_port: Some(8200),
        root_token: Some("hvs.test".to_string()),
        skip_seed: false,
        creds: HashMap::new(),
        dirty_paths: vec![],
        pg_pod: Some("postgres-1".to_string()),
    };
    // Serialize to JSON and back; populated fields must survive.
    let encoded = serde_json::to_value(&original).unwrap();
    let decoded: UpData = serde_json::from_value(encoded).unwrap();
    assert_eq!(decoded.domain, "sunbeam.pt");
    assert!(decoded.ctx.is_some());
    assert_eq!(decoded.ob_pod.as_deref(), Some("openbao-0"));
    assert_eq!(decoded.pg_pod.as_deref(), Some("postgres-1"));
}
|
||||||
|
|
||||||
|
// -- VerifyData --
|
||||||
|
|
||||||
|
#[test]
fn test_verify_data_default() {
    // A fresh VerifyData has not synced and holds no sentinel value.
    let defaults = VerifyData::default();
    assert!(!defaults.synced);
    assert!(defaults.test_value.is_none());
}
|
||||||
|
|
||||||
|
#[test]
fn test_verify_data_roundtrip() {
    let original = VerifyData {
        ctx: Some(make_ctx()),
        ob_pod: Some("openbao-0".to_string()),
        ob_port: Some(8200),
        root_token: Some("root".to_string()),
        test_value: Some("sentinel-abc".to_string()),
        synced: true,
    };
    // JSON round-trip must preserve the sync flag and sentinel.
    let encoded = serde_json::to_value(&original).unwrap();
    let decoded: VerifyData = serde_json::from_value(encoded).unwrap();
    assert!(decoded.synced);
    assert_eq!(decoded.test_value.as_deref(), Some("sentinel-abc"));
}
|
||||||
|
|
||||||
|
// -- BootstrapData --
|
||||||
|
|
||||||
|
#[test]
fn test_bootstrap_data_default() {
    // Default BootstrapData carries no pod, password, or domain.
    let defaults = BootstrapData::default();
    assert!(defaults.gitea_pod.is_none());
    assert!(defaults.gitea_admin_pass.is_none());
    assert!(defaults.domain.is_none());
}
|
||||||
|
|
||||||
|
#[test]
fn test_bootstrap_data_roundtrip() {
    let original = BootstrapData {
        ctx: Some(make_ctx()),
        gitea_pod: Some("gitea-0".to_string()),
        gitea_admin_pass: Some("admin123".to_string()),
        domain: Some("test.local".to_string()),
    };
    // JSON round-trip must preserve the Gitea fields.
    let encoded = serde_json::to_value(&original).unwrap();
    let decoded: BootstrapData = serde_json::from_value(encoded).unwrap();
    assert_eq!(decoded.gitea_pod.as_deref(), Some("gitea-0"));
    assert_eq!(decoded.gitea_admin_pass.as_deref(), Some("admin123"));
}
|
||||||
|
|
||||||
|
// -- Cross-data-type: ensure WFE can use serde_json::Value as data --
|
||||||
|
|
||||||
|
#[test]
fn test_seed_data_as_json_value() {
    let seed = SeedData {
        ctx: Some(make_ctx()),
        ..Default::default()
    };
    // The WFE stores workflow data as serde_json::Value — make sure the
    // struct converts to an object and back without losing the context.
    let value = serde_json::to_value(&seed).unwrap();
    assert!(value.is_object());
    let decoded: SeedData = serde_json::from_value(value).unwrap();
    assert!(decoded.ctx.is_some());
}
|
||||||
|
}
|
||||||
328
src/workflows/host.rs
Normal file
328
src/workflows/host.rs
Normal file
@@ -0,0 +1,328 @@
|
|||||||
|
use std::path::PathBuf;
|
||||||
|
use std::sync::Arc;
|
||||||
|
|
||||||
|
use wfe::WorkflowHostBuilder;
|
||||||
|
use wfe_core::test_support::{InMemoryLockProvider, InMemoryQueueProvider};
|
||||||
|
use wfe_sqlite::SqlitePersistenceProvider;
|
||||||
|
|
||||||
|
use crate::error::{Result, SunbeamError};
|
||||||
|
|
||||||
|
/// Build and start a WorkflowHost with a SQLite database at the given path.
|
||||||
|
///
|
||||||
|
/// Lock and queue providers are in-memory (single-process, non-distributed).
|
||||||
|
pub async fn create_host_at(db_path: &std::path::Path) -> Result<wfe::WorkflowHost> {
|
||||||
|
if let Some(parent) = db_path.parent() {
|
||||||
|
std::fs::create_dir_all(parent).map_err(|e| {
|
||||||
|
SunbeamError::Io {
|
||||||
|
context: format!("create workflow db dir: {}", parent.display()),
|
||||||
|
source: e,
|
||||||
|
}
|
||||||
|
})?;
|
||||||
|
}
|
||||||
|
|
||||||
|
let db_url = format!("sqlite://{}?mode=rwc", db_path.display());
|
||||||
|
let persistence = SqlitePersistenceProvider::new(&db_url)
|
||||||
|
.await
|
||||||
|
.map_err(|e| SunbeamError::Other(format!("workflow db init: {e}")))?;
|
||||||
|
|
||||||
|
let host = WorkflowHostBuilder::new()
|
||||||
|
.use_persistence(Arc::new(persistence))
|
||||||
|
.use_lock_provider(Arc::new(InMemoryLockProvider::new()))
|
||||||
|
.use_queue_provider(Arc::new(InMemoryQueueProvider::new()))
|
||||||
|
.build()
|
||||||
|
.map_err(|e| SunbeamError::Other(format!("workflow host build: {e}")))?;
|
||||||
|
|
||||||
|
host.start()
|
||||||
|
.await
|
||||||
|
.map_err(|e| SunbeamError::Other(format!("workflow host start: {e}")))?;
|
||||||
|
|
||||||
|
Ok(host)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Build and start a WorkflowHost configured for the given context.
|
||||||
|
///
|
||||||
|
/// The host uses a per-context SQLite database at `~/.sunbeam/{context}/workflows.db`.
|
||||||
|
pub async fn create_host(context_name: &str) -> Result<wfe::WorkflowHost> {
|
||||||
|
let db_path = workflow_db_path(context_name);
|
||||||
|
create_host_at(&db_path).await
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Gracefully shut down the host.
|
||||||
|
pub async fn shutdown_host(host: wfe::WorkflowHost) {
|
||||||
|
host.stop().await;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create a host backed by an in-memory SQLite database (for tests).
|
||||||
|
pub async fn create_test_host() -> Result<wfe::WorkflowHost> {
|
||||||
|
let persistence = SqlitePersistenceProvider::new("sqlite::memory:")
|
||||||
|
.await
|
||||||
|
.map_err(|e| SunbeamError::Other(format!("in-memory db init: {e}")))?;
|
||||||
|
|
||||||
|
let host = WorkflowHostBuilder::new()
|
||||||
|
.use_persistence(Arc::new(persistence))
|
||||||
|
.use_lock_provider(Arc::new(InMemoryLockProvider::new()))
|
||||||
|
.use_queue_provider(Arc::new(InMemoryQueueProvider::new()))
|
||||||
|
.build()
|
||||||
|
.map_err(|e| SunbeamError::Other(format!("test host build: {e}")))?;
|
||||||
|
|
||||||
|
host.start()
|
||||||
|
.await
|
||||||
|
.map_err(|e| SunbeamError::Other(format!("test host start: {e}")))?;
|
||||||
|
|
||||||
|
Ok(host)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Resolve the SQLite database path for a context.
|
||||||
|
pub fn workflow_db_path(context_name: &str) -> PathBuf {
|
||||||
|
crate::config::context_dir(context_name).join("workflows.db")
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;
    use std::time::Duration;
    use wfe::run_workflow_sync;
    use wfe_core::builder::WorkflowBuilder;
    use wfe_core::models::{ExecutionResult, WorkflowStatus};
    use wfe_core::traits::{StepBody, StepExecutionContext, WorkflowRepository};

    // Step that does no work and immediately advances the workflow.
    #[derive(Default)]
    struct NoOp;

    #[async_trait::async_trait]
    impl StepBody for NoOp {
        async fn run(
            &mut self,
            _ctx: &StepExecutionContext<'_>,
        ) -> wfe_core::Result<ExecutionResult> {
            Ok(ExecutionResult::next())
        }
    }

    #[test]
    fn test_workflow_db_path_default() {
        // An empty context name resolves to the "default" context directory.
        assert!(workflow_db_path("").ends_with(".sunbeam/default/workflows.db"));
    }

    #[test]
    fn test_workflow_db_path_named() {
        assert!(workflow_db_path("production").ends_with(".sunbeam/production/workflows.db"));
    }

    #[test]
    fn test_workflow_db_path_custom() {
        let db = workflow_db_path("staging");
        assert!(db.ends_with(".sunbeam/staging/workflows.db"));
        assert!(!db.to_string_lossy().contains("default"));
    }

    #[tokio::test]
    async fn test_create_test_host() {
        let host = create_test_host().await.unwrap();
        // A freshly created in-memory store has no runnable instances.
        let runnable = host
            .persistence()
            .get_runnable_instances(chrono::Utc::now())
            .await
            .unwrap();
        assert!(runnable.is_empty());
        host.stop().await;
    }

    #[tokio::test]
    async fn test_create_host_at_with_temp_dir() {
        let tmp = tempfile::tempdir().unwrap();
        let db_path = tmp.path().join("ctx").join("workflows.db");
        let host = create_host_at(&db_path).await.unwrap();

        // Opening the host materializes the database file on disk.
        assert!(db_path.exists());

        // And the new store answers queries.
        let runnable = host
            .persistence()
            .get_runnable_instances(chrono::Utc::now())
            .await
            .unwrap();
        assert!(runnable.is_empty());

        shutdown_host(host).await;
    }

    #[tokio::test]
    async fn test_create_host_at_creates_parent_dirs() {
        let tmp = tempfile::tempdir().unwrap();
        // Multiple missing ancestors must all be created.
        let db_path = tmp.path().join("deep").join("nested").join("workflows.db");
        let host = create_host_at(&db_path).await.unwrap();
        assert!(db_path.exists());
        shutdown_host(host).await;
    }

    #[tokio::test]
    async fn test_shutdown_host_is_clean() {
        let host = create_test_host().await.unwrap();
        // Must return without panicking or hanging.
        shutdown_host(host).await;
    }

    #[tokio::test]
    async fn test_host_start_and_run_trivial_workflow() {
        let host = create_test_host().await.unwrap();
        host.register_step::<NoOp>().await;

        let definition = WorkflowBuilder::<serde_json::Value>::new()
            .start_with::<NoOp>()
            .name("no-op")
            .end_workflow()
            .build("test-wf", 1);
        host.register_workflow_definition(definition).await;

        let instance = run_workflow_sync(
            &host,
            "test-wf",
            1,
            serde_json::json!({}),
            Duration::from_secs(5),
        )
        .await
        .unwrap();

        assert_eq!(instance.status, WorkflowStatus::Complete);
        assert_eq!(instance.workflow_definition_id, "test-wf");
        assert_eq!(instance.execution_pointers.len(), 1);

        host.stop().await;
    }

    #[tokio::test]
    async fn test_host_multi_step_workflow() {
        let host = create_test_host().await.unwrap();
        host.register_step::<NoOp>().await;

        let definition = WorkflowBuilder::<serde_json::Value>::new()
            .start_with::<NoOp>()
            .name("step-a")
            .then::<NoOp>()
            .name("step-b")
            .then::<NoOp>()
            .name("step-c")
            .end_workflow()
            .build("multi-step", 1);
        host.register_workflow_definition(definition).await;

        let instance = run_workflow_sync(
            &host,
            "multi-step",
            1,
            serde_json::json!({}),
            Duration::from_secs(5),
        )
        .await
        .unwrap();

        assert_eq!(instance.status, WorkflowStatus::Complete);
        // One execution pointer per executed step.
        assert_eq!(instance.execution_pointers.len(), 3);

        host.stop().await;
    }

    #[tokio::test]
    async fn test_host_workflow_with_data_output() {
        let host = create_test_host().await.unwrap();

        // Step that emits output data merged into the workflow data.
        #[derive(Default)]
        struct OutputStep;

        #[async_trait::async_trait]
        impl StepBody for OutputStep {
            async fn run(
                &mut self,
                _ctx: &StepExecutionContext<'_>,
            ) -> wfe_core::Result<ExecutionResult> {
                let mut result = ExecutionResult::next();
                result.output_data = Some(serde_json::json!({"test_key": "test_value"}));
                Ok(result)
            }
        }

        host.register_step::<OutputStep>().await;

        let definition = WorkflowBuilder::<serde_json::Value>::new()
            .start_with::<OutputStep>()
            .name("output")
            .end_workflow()
            .build("output-wf", 1);
        host.register_workflow_definition(definition).await;

        let instance = run_workflow_sync(
            &host,
            "output-wf",
            1,
            serde_json::json!({}),
            Duration::from_secs(5),
        )
        .await
        .unwrap();

        assert_eq!(instance.status, WorkflowStatus::Complete);
        // The step's output must be visible in the final workflow data.
        assert_eq!(instance.data["test_key"], "test_value");

        host.stop().await;
    }

    #[tokio::test]
    async fn test_host_get_workflow_by_id() {
        let host = create_test_host().await.unwrap();
        host.register_step::<NoOp>().await;

        let definition = WorkflowBuilder::<serde_json::Value>::new()
            .start_with::<NoOp>()
            .name("get-test")
            .end_workflow()
            .build("get-wf", 1);
        host.register_workflow_definition(definition).await;

        let instance = run_workflow_sync(
            &host,
            "get-wf",
            1,
            serde_json::json!({"initial": true}),
            Duration::from_secs(5),
        )
        .await
        .unwrap();

        // Fetching by id returns the same, completed instance.
        let fetched = host.get_workflow(&instance.id).await.unwrap();
        assert_eq!(fetched.id, instance.id);
        assert_eq!(fetched.workflow_definition_id, "get-wf");
        assert_eq!(fetched.status, WorkflowStatus::Complete);

        host.stop().await;
    }

    #[tokio::test]
    async fn test_host_with_file_sqlite_runs_workflow() {
        // Same trivial workflow as above, but against a file-backed database.
        let tmp = tempfile::tempdir().unwrap();
        let db_path = tmp.path().join("wf-test").join("workflows.db");
        let host = create_host_at(&db_path).await.unwrap();

        host.register_step::<NoOp>().await;

        let definition = WorkflowBuilder::<serde_json::Value>::new()
            .start_with::<NoOp>()
            .name("file-test")
            .end_workflow()
            .build("file-wf", 1);
        host.register_workflow_definition(definition).await;

        let instance = run_workflow_sync(
            &host,
            "file-wf",
            1,
            serde_json::json!({}),
            Duration::from_secs(5),
        )
        .await
        .unwrap();

        assert_eq!(instance.status, WorkflowStatus::Complete);

        shutdown_host(host).await;
    }
}
|
||||||
223
src/workflows/mod.rs
Normal file
223
src/workflows/mod.rs
Normal file
@@ -0,0 +1,223 @@
|
|||||||
|
pub mod cmd;
|
||||||
|
pub mod data;
|
||||||
|
pub mod host;
|
||||||
|
pub mod primitives;
|
||||||
|
|
||||||
|
pub mod seed;
|
||||||
|
pub mod up;
|
||||||
|
pub mod verify;
|
||||||
|
pub mod bootstrap;
|
||||||
|
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
use crate::config;
|
||||||
|
use crate::error::Result;
|
||||||
|
|
||||||
|
/// Serializable context passed through workflow data.
|
||||||
|
///
|
||||||
|
/// Steps reconstruct transient handles (kube::Client, BaoClient) from these
|
||||||
|
/// fields at runtime — they are not serializable, so we store just enough
|
||||||
|
/// to recreate them.
|
||||||
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
pub struct StepContext {
|
||||||
|
pub domain: String,
|
||||||
|
pub infra_dir: String,
|
||||||
|
pub kube_context: String,
|
||||||
|
pub ssh_host: String,
|
||||||
|
pub is_production: bool,
|
||||||
|
pub acme_email: String,
|
||||||
|
/// The config context name, used for per-context DB paths.
|
||||||
|
pub context_name: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl StepContext {
|
||||||
|
/// Build a StepContext from the currently active CLI context.
|
||||||
|
pub fn from_active() -> Self {
|
||||||
|
let ctx = config::active_context();
|
||||||
|
let cfg = config::load_config();
|
||||||
|
Self::from_config(ctx, &cfg.current_context)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Build a StepContext from a config Context and context name.
|
||||||
|
/// Separated from `from_active()` to allow unit testing without global state.
|
||||||
|
pub fn from_config(ctx: &config::Context, current_context: &str) -> Self {
|
||||||
|
let context_name = if current_context.is_empty() {
|
||||||
|
"default".to_string()
|
||||||
|
} else {
|
||||||
|
current_context.to_string()
|
||||||
|
};
|
||||||
|
|
||||||
|
StepContext {
|
||||||
|
domain: ctx.domain.clone(),
|
||||||
|
infra_dir: ctx.infra_dir.clone(),
|
||||||
|
kube_context: if ctx.kube_context.is_empty() {
|
||||||
|
"sunbeam".to_string()
|
||||||
|
} else {
|
||||||
|
ctx.kube_context.clone()
|
||||||
|
},
|
||||||
|
ssh_host: ctx.ssh_host.clone(),
|
||||||
|
is_production: !ctx.ssh_host.is_empty(),
|
||||||
|
acme_email: ctx.acme_email.clone(),
|
||||||
|
context_name,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Reconstruct a Kubernetes client from the stored context.
|
||||||
|
pub async fn kube_client(&self) -> Result<kube::Client> {
|
||||||
|
crate::kube::get_client().await.map(|c| c.clone())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Build an OpenBao HTTP client from a local port and token.
|
||||||
|
pub fn bao_client(&self, port: u16, token: &str) -> crate::openbao::BaoClient {
|
||||||
|
crate::openbao::BaoClient::with_token(
|
||||||
|
&format!("http://127.0.0.1:{port}"),
|
||||||
|
token,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;

    // Fixture: a local (non-production) context.
    fn test_ctx() -> StepContext {
        StepContext {
            domain: "test.local".to_string(),
            infra_dir: "/tmp/infra".to_string(),
            kube_context: "test-cluster".to_string(),
            ssh_host: String::new(),
            is_production: false,
            acme_email: "test@test.local".to_string(),
            context_name: "test".to_string(),
        }
    }

    // Fixture: a production context with an SSH host configured.
    fn production_ctx() -> StepContext {
        StepContext {
            domain: "sunbeam.pt".to_string(),
            infra_dir: "/srv/infra".to_string(),
            kube_context: "production".to_string(),
            ssh_host: "user@server.example.com".to_string(),
            is_production: true,
            acme_email: "ops@sunbeam.pt".to_string(),
            context_name: "production".to_string(),
        }
    }

    #[test]
    fn test_step_context_serialization_roundtrip() {
        let encoded = serde_json::to_value(&test_ctx()).unwrap();
        let decoded: StepContext = serde_json::from_value(encoded).unwrap();
        assert_eq!(decoded.domain, "test.local");
        assert_eq!(decoded.kube_context, "test-cluster");
        assert!(!decoded.is_production);
        assert_eq!(decoded.context_name, "test");
    }

    #[test]
    fn test_step_context_production_flag() {
        let local = test_ctx();
        assert!(!local.is_production);
        assert!(local.ssh_host.is_empty());

        let prod = production_ctx();
        assert!(prod.is_production);
        assert!(!prod.ssh_host.is_empty());
    }

    #[test]
    fn test_step_context_bao_client_construction() {
        // BaoClient is opaque; we only check that construction does not panic.
        let client = test_ctx().bao_client(8200, "test-token");
        drop(client);
    }

    #[test]
    fn test_step_context_bao_client_ephemeral_port() {
        // Same check at the bottom of the ephemeral port range.
        let client = test_ctx().bao_client(49152, "some-token");
        drop(client);
    }

    #[test]
    fn test_step_context_embedded_in_json() {
        // A StepContext stored under "__ctx" can be pulled back out.
        let wrapper = serde_json::json!({
            "__ctx": test_ctx(),
            "some_field": "value",
        });
        let extracted: StepContext = serde_json::from_value(wrapper["__ctx"].clone()).unwrap();
        assert_eq!(extracted.domain, "test.local");
    }

    #[test]
    fn test_from_config_local_context() {
        let cfg = crate::config::Context {
            domain: "local.dev".to_string(),
            kube_context: "k3s-local".to_string(),
            ssh_host: String::new(),
            infra_dir: "/home/user/infra".to_string(),
            acme_email: "admin@local.dev".to_string(),
        };
        let sc = StepContext::from_config(&cfg, "local");
        assert_eq!(sc.domain, "local.dev");
        assert_eq!(sc.kube_context, "k3s-local");
        assert!(!sc.is_production);
        assert_eq!(sc.context_name, "local");
        assert_eq!(sc.infra_dir, "/home/user/infra");
        assert_eq!(sc.acme_email, "admin@local.dev");
    }

    #[test]
    fn test_from_config_production_context() {
        let cfg = crate::config::Context {
            domain: "sunbeam.pt".to_string(),
            kube_context: "production".to_string(),
            ssh_host: "sienna@62.210.145.138".to_string(),
            infra_dir: "/srv/infra".to_string(),
            acme_email: "ops@sunbeam.pt".to_string(),
        };
        let sc = StepContext::from_config(&cfg, "production");
        assert!(sc.is_production);
        assert_eq!(sc.ssh_host, "sienna@62.210.145.138");
        assert_eq!(sc.context_name, "production");
    }

    #[test]
    fn test_from_config_empty_context_name_defaults() {
        let sc = StepContext::from_config(&crate::config::Context::default(), "");
        assert_eq!(sc.context_name, "default");
    }

    #[test]
    fn test_from_config_empty_kube_context_defaults_sunbeam() {
        let cfg = crate::config::Context {
            kube_context: String::new(),
            ..Default::default()
        };
        let sc = StepContext::from_config(&cfg, "test");
        assert_eq!(sc.kube_context, "sunbeam");
    }

    #[test]
    fn test_step_context_empty_fields() {
        // All-empty contexts must still survive a string round-trip.
        let empty = StepContext {
            domain: String::new(),
            infra_dir: String::new(),
            kube_context: String::new(),
            ssh_host: String::new(),
            is_production: false,
            acme_email: String::new(),
            context_name: String::new(),
        };
        let encoded = serde_json::to_string(&empty).unwrap();
        let decoded: StepContext = serde_json::from_str(&encoded).unwrap();
        assert!(decoded.domain.is_empty());
        assert!(!decoded.is_production);
    }
}
|
||||||
236
src/workflows/seed/definition.rs
Normal file
236
src/workflows/seed/definition.rs
Normal file
@@ -0,0 +1,236 @@
|
|||||||
|
//! Seed workflow definition — linear sequence of all seed steps.
|
||||||
|
|
||||||
|
use serde_json::json;
|
||||||
|
use wfe_core::builder::WorkflowBuilder;
|
||||||
|
use wfe_core::models::WorkflowDefinition;
|
||||||
|
|
||||||
|
use super::steps;
|
||||||
|
use crate::workflows::primitives::{
|
||||||
|
CollectCredentials, CreateK8sSecret, CreatePGDatabase, CreatePGRole,
|
||||||
|
EnableVaultAuth, EnsureNamespace, SeedKVPath, WriteKVPath,
|
||||||
|
WriteVaultAuthConfig, WriteVaultPolicy, WriteVaultRole,
|
||||||
|
};
|
||||||
|
use crate::workflows::primitives::kv_service_configs;
|
||||||
|
use steps::postgres::pg_db_map;
|
||||||
|
|
||||||
|
/// Build the seed workflow definition.
|
||||||
|
pub fn build() -> WorkflowDefinition {
|
||||||
|
WorkflowBuilder::<serde_json::Value>::new()
|
||||||
|
.start_with::<steps::FindOpenBaoPod>()
|
||||||
|
.name("find-openbao-pod")
|
||||||
|
.then::<steps::WaitPodRunning>()
|
||||||
|
.name("wait-pod-running")
|
||||||
|
.then::<steps::InitOrUnsealOpenBao>()
|
||||||
|
.name("init-or-unseal-openbao")
|
||||||
|
.parallel(|p| {
|
||||||
|
let mut p = p;
|
||||||
|
for cfg in kv_service_configs::all_service_configs() {
|
||||||
|
let service = cfg["service"].as_str().unwrap().to_string();
|
||||||
|
p = p.branch(|b| {
|
||||||
|
let seed_id = b.add_step_typed::<SeedKVPath>(
|
||||||
|
&format!("seed-{service}"), Some(cfg.clone()));
|
||||||
|
let write_id = b.add_step_typed::<WriteKVPath>(
|
||||||
|
&format!("write-{service}"), Some(json!({"service": &service})));
|
||||||
|
b.wire_outcome(seed_id, write_id, None);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
p.branch(|b| {
|
||||||
|
let seed_id = b.add_step_typed::<SeedKVPath>(
|
||||||
|
"seed-kratos-admin", Some(kv_service_configs::kratos_admin_config()));
|
||||||
|
let write_id = b.add_step_typed::<WriteKVPath>(
|
||||||
|
"write-kratos-admin", Some(json!({"service": "kratos-admin"})));
|
||||||
|
b.wire_outcome(seed_id, write_id, None);
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.then::<CollectCredentials>()
|
||||||
|
.name("collect-credentials")
|
||||||
|
.then::<EnableVaultAuth>()
|
||||||
|
.name("enable-k8s-auth")
|
||||||
|
.config(json!({"mount": "kubernetes", "type": "kubernetes"}))
|
||||||
|
.then::<WriteVaultAuthConfig>()
|
||||||
|
.name("write-k8s-auth-config")
|
||||||
|
.config(json!({"mount": "kubernetes", "config": {
|
||||||
|
"kubernetes_host": "https://kubernetes.default.svc.cluster.local"
|
||||||
|
}}))
|
||||||
|
.then::<WriteVaultPolicy>()
|
||||||
|
.name("write-vso-policy")
|
||||||
|
.config(json!({"name": "vso-reader", "hcl": concat!(
|
||||||
|
"path \"secret/data/*\" { capabilities = [\"read\"] }\n",
|
||||||
|
"path \"secret/metadata/*\" { capabilities = [\"read\", \"list\"] }\n",
|
||||||
|
"path \"database/static-creds/*\" { capabilities = [\"read\"] }\n",
|
||||||
|
)}))
|
||||||
|
.then::<WriteVaultRole>()
|
||||||
|
.name("write-vso-role")
|
||||||
|
.config(json!({"mount": "kubernetes", "role": "vso", "config": {
|
||||||
|
"bound_service_account_names": "default",
|
||||||
|
"bound_service_account_namespaces": "ory,devtools,storage,lasuite,stalwart,matrix,media,data,monitoring,cert-manager",
|
||||||
|
"policies": "vso-reader",
|
||||||
|
"ttl": "1h"
|
||||||
|
}}))
|
||||||
|
.then::<steps::WaitForPostgres>()
|
||||||
|
.name("wait-for-postgres")
|
||||||
|
|
||||||
|
.parallel(|p| {
|
||||||
|
let db_map = pg_db_map();
|
||||||
|
let mut p = p;
|
||||||
|
for (user, db) in &db_map {
|
||||||
|
p = p.branch(|b| {
|
||||||
|
let role_id = b.add_step_typed::<CreatePGRole>(
|
||||||
|
&format!("pg-role-{user}"),
|
||||||
|
Some(json!({"username": user})),
|
||||||
|
);
|
||||||
|
let db_id = b.add_step_typed::<CreatePGDatabase>(
|
||||||
|
&format!("pg-db-{db}"),
|
||||||
|
Some(json!({"dbname": db, "owner": user})),
|
||||||
|
);
|
||||||
|
b.wire_outcome(role_id, db_id, None);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
p
|
||||||
|
})
|
||||||
|
|
||||||
|
.then::<steps::ConfigureDatabaseEngine>()
|
||||||
|
.name("configure-database-engine")
|
||||||
|
.parallel(|p| p
|
||||||
|
.branch(|b| {
|
||||||
|
let ns = b.add_step_typed::<EnsureNamespace>("ensure-ns-ory",
|
||||||
|
Some(json!({"namespace": "ory"})));
|
||||||
|
let s1 = b.add_step_typed::<CreateK8sSecret>("secret-hydra",
|
||||||
|
Some(json!({"namespace":"ory","name":"hydra","data":{
|
||||||
|
"secretsSystem":"hydra-system-secret",
|
||||||
|
"secretsCookie":"hydra-cookie-secret",
|
||||||
|
"pairwise-salt":"hydra-pairwise-salt"
|
||||||
|
}})));
|
||||||
|
let s2 = b.add_step_typed::<CreateK8sSecret>("secret-kratos-app",
|
||||||
|
Some(json!({"namespace":"ory","name":"kratos-app-secrets","data":{
|
||||||
|
"secretsDefault":"kratos-secrets-default",
|
||||||
|
"secretsCookie":"kratos-secrets-cookie"
|
||||||
|
}})));
|
||||||
|
b.wire_outcome(ns, s1, None);
|
||||||
|
b.wire_outcome(s1, s2, None);
|
||||||
|
})
|
||||||
|
.branch(|b| {
|
||||||
|
let ns = b.add_step_typed::<EnsureNamespace>("ensure-ns-devtools",
|
||||||
|
Some(json!({"namespace": "devtools"})));
|
||||||
|
let s1 = b.add_step_typed::<CreateK8sSecret>("secret-gitea-s3",
|
||||||
|
Some(json!({"namespace":"devtools","name":"gitea-s3-credentials","data":{
|
||||||
|
"access-key":"s3-access-key",
|
||||||
|
"secret-key":"s3-secret-key"
|
||||||
|
}})));
|
||||||
|
let s2 = b.add_step_typed::<CreateK8sSecret>("secret-gitea-admin",
|
||||||
|
Some(json!({"namespace":"devtools","name":"gitea-admin-credentials","data":{
|
||||||
|
"username":"literal:gitea_admin",
|
||||||
|
"password":"gitea-admin-password"
|
||||||
|
}})));
|
||||||
|
b.wire_outcome(ns, s1, None);
|
||||||
|
b.wire_outcome(s1, s2, None);
|
||||||
|
})
|
||||||
|
.branch(|b| {
|
||||||
|
let ns = b.add_step_typed::<EnsureNamespace>("ensure-ns-storage",
|
||||||
|
Some(json!({"namespace": "storage"})));
|
||||||
|
let s1 = b.add_step_typed::<CreateK8sSecret>("secret-seaweedfs-s3-creds",
|
||||||
|
Some(json!({"namespace":"storage","name":"seaweedfs-s3-credentials","data":{
|
||||||
|
"S3_ACCESS_KEY":"s3-access-key",
|
||||||
|
"S3_SECRET_KEY":"s3-secret-key"
|
||||||
|
}})));
|
||||||
|
let s2 = b.add_step_typed::<CreateK8sSecret>("secret-seaweedfs-s3-json",
|
||||||
|
Some(json!({"namespace":"storage","name":"seaweedfs-s3-json","data":{
|
||||||
|
"s3.json":"s3_json"
|
||||||
|
}})));
|
||||||
|
b.wire_outcome(ns, s1, None);
|
||||||
|
b.wire_outcome(s1, s2, None);
|
||||||
|
})
|
||||||
|
.branch(|b| {
|
||||||
|
let ns = b.add_step_typed::<EnsureNamespace>("ensure-ns-lasuite",
|
||||||
|
Some(json!({"namespace": "lasuite"})));
|
||||||
|
let s1 = b.add_step_typed::<CreateK8sSecret>("secret-lasuite-s3",
|
||||||
|
Some(json!({"namespace":"lasuite","name":"seaweedfs-s3-credentials","data":{
|
||||||
|
"S3_ACCESS_KEY":"s3-access-key",
|
||||||
|
"S3_SECRET_KEY":"s3-secret-key"
|
||||||
|
}})));
|
||||||
|
let s2 = b.add_step_typed::<CreateK8sSecret>("secret-hive-oidc",
|
||||||
|
Some(json!({"namespace":"lasuite","name":"hive-oidc","data":{
|
||||||
|
"client-id":"hive-oidc-client-id",
|
||||||
|
"client-secret":"hive-oidc-client-secret"
|
||||||
|
}})));
|
||||||
|
let s3 = b.add_step_typed::<CreateK8sSecret>("secret-people-django",
|
||||||
|
Some(json!({"namespace":"lasuite","name":"people-django-secret","data":{
|
||||||
|
"DJANGO_SECRET_KEY":"people-django-secret"
|
||||||
|
}})));
|
||||||
|
b.wire_outcome(ns, s1, None);
|
||||||
|
b.wire_outcome(s1, s2, None);
|
||||||
|
b.wire_outcome(s2, s3, None);
|
||||||
|
})
|
||||||
|
.branch(|b| {
|
||||||
|
b.add_step_typed::<EnsureNamespace>("ensure-ns-matrix",
|
||||||
|
Some(json!({"namespace": "matrix"})));
|
||||||
|
})
|
||||||
|
.branch(|b| {
|
||||||
|
b.add_step_typed::<EnsureNamespace>("ensure-ns-media",
|
||||||
|
Some(json!({"namespace": "media"})));
|
||||||
|
})
|
||||||
|
.branch(|b| {
|
||||||
|
b.add_step_typed::<EnsureNamespace>("ensure-ns-monitoring",
|
||||||
|
Some(json!({"namespace": "monitoring"})));
|
||||||
|
})
|
||||||
|
)
|
||||||
|
.then::<steps::SyncGiteaAdminPassword>()
|
||||||
|
.name("sync-gitea-admin-password")
|
||||||
|
.then::<steps::SeedKratosAdminIdentity>()
|
||||||
|
.name("seed-kratos-admin-identity")
|
||||||
|
.then::<steps::PrintSeedOutputs>()
|
||||||
|
.name("print-seed-outputs")
|
||||||
|
.end_workflow()
|
||||||
|
.build("seed", 2)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_build_returns_valid_definition() {
        let def = build();
        assert_eq!(def.id, "seed");
        assert_eq!(def.version, 2);
        // Parallel PG branches push the step count well past the linear spine.
        assert!(def.steps.len() > 13, "expected >13 steps, got {}", def.steps.len());
    }

    #[test]
    fn test_has_pg_role_and_db_steps() {
        let def = build();
        let roles = def
            .steps
            .iter()
            .filter(|s| s.step_type.contains("CreatePGRole"))
            .count();
        let dbs = def
            .steps
            .iter()
            .filter(|s| s.step_type.contains("CreatePGDatabase"))
            .count();
        assert_eq!(roles, 15, "should have 15 CreatePGRole steps");
        assert_eq!(dbs, 15, "should have 15 CreatePGDatabase steps");
    }

    #[test]
    fn test_pg_steps_have_config() {
        // Every PG step must carry the config its step body reads.
        let def = build();
        for step in &def.steps {
            if step.step_type.contains("CreatePGRole") {
                let config = step.step_config.as_ref().expect("CreatePGRole missing config");
                assert!(config.get("username").is_some());
            }
            if step.step_type.contains("CreatePGDatabase") {
                let config = step.step_config.as_ref().expect("CreatePGDatabase missing config");
                assert!(config.get("dbname").is_some());
                assert!(config.get("owner").is_some());
            }
        }
    }

    #[test]
    fn test_first_and_last_steps() {
        let def = build();
        assert_eq!(def.steps[0].name, Some("find-openbao-pod".into()));
        assert_eq!(def.steps.last().unwrap().name, Some("print-seed-outputs".into()));
    }
}
|
||||||
183
src/workflows/seed/mod.rs
Normal file
183
src/workflows/seed/mod.rs
Normal file
@@ -0,0 +1,183 @@
|
|||||||
|
//! Seed workflow — orchestrates OpenBao init, KV seeding, Postgres setup,
|
||||||
|
//! K8s secret mirroring, and Kratos admin identity creation.
|
||||||
|
|
||||||
|
pub mod definition;
|
||||||
|
pub mod steps;
|
||||||
|
|
||||||
|
use crate::output;
|
||||||
|
|
||||||
|
/// Register all seed workflow steps and the workflow definition with a host.
///
/// Registers the shared, config-driven primitive steps first, then the
/// seed-specific steps, and finally the workflow definition built by
/// [`definition::build`]. All registrations are awaited on the given host.
pub async fn register(host: &wfe::WorkflowHost) {
    // Primitive steps (config-driven, reusable)
    host.register_step::<crate::workflows::primitives::CreatePGRole>().await;
    host.register_step::<crate::workflows::primitives::CreatePGDatabase>().await;
    host.register_step::<crate::workflows::primitives::EnsureNamespace>().await;
    host.register_step::<crate::workflows::primitives::CreateK8sSecret>().await;
    host.register_step::<crate::workflows::primitives::EnableVaultAuth>().await;
    host.register_step::<crate::workflows::primitives::WriteVaultAuthConfig>().await;
    host.register_step::<crate::workflows::primitives::WriteVaultPolicy>().await;
    host.register_step::<crate::workflows::primitives::WriteVaultRole>().await;
    host.register_step::<crate::workflows::primitives::SeedKVPath>().await;
    host.register_step::<crate::workflows::primitives::WriteKVPath>().await;
    host.register_step::<crate::workflows::primitives::CollectCredentials>().await;

    // Seed-specific steps
    host.register_step::<steps::FindOpenBaoPod>().await;
    host.register_step::<steps::WaitPodRunning>().await;
    host.register_step::<steps::InitOrUnsealOpenBao>().await;
    host.register_step::<steps::WaitForPostgres>().await;
    host.register_step::<steps::ConfigureDatabaseEngine>().await;
    host.register_step::<steps::SyncGiteaAdminPassword>().await;
    host.register_step::<steps::SeedKratosAdminIdentity>().await;
    host.register_step::<steps::PrintSeedOutputs>().await;

    // Register workflow definition
    // NOTE(review): steps are registered before the definition — presumably the
    // host resolves step types by name when the definition runs; confirm ordering
    // requirement against the WFE host implementation.
    host.register_workflow_definition(definition::build()).await;
}
|
||||||
|
|
||||||
|
/// Print a summary of the completed seed workflow.
|
||||||
|
pub fn print_summary(instance: &wfe_core::models::WorkflowInstance) {
|
||||||
|
output::step("Seed workflow summary:");
|
||||||
|
for ep in &instance.execution_pointers {
|
||||||
|
let fallback = format!("step-{}", ep.step_id);
|
||||||
|
let name = ep.step_name.as_deref().unwrap_or(&fallback);
|
||||||
|
let status = format!("{:?}", ep.status);
|
||||||
|
let duration = match (ep.start_time, ep.end_time) {
|
||||||
|
(Some(start), Some(end)) => {
|
||||||
|
let d = end - start;
|
||||||
|
format!("{}ms", d.num_milliseconds())
|
||||||
|
}
|
||||||
|
_ => "-".to_string(),
|
||||||
|
};
|
||||||
|
output::ok(&format!(" {name:<40} {status:<12} {duration}"));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;
    use std::time::Duration;
    use wfe::run_workflow_sync;
    use wfe_core::models::WorkflowStatus;

    /// Registering all steps/definitions must succeed, and a minimal workflow
    /// assembled from the registered step types must run to completion on the
    /// skip path (no cluster access required when `skip_seed` is true).
    #[tokio::test]
    async fn test_register_all_steps_and_definition() {
        let host = crate::workflows::host::create_test_host().await.unwrap();
        register(&host).await;

        let def = definition::build();
        assert_eq!(def.id, "seed");
        assert!(def.steps.len() > 13);

        // Run a minimal skip-path test to prove steps are registered
        let skip_def = wfe_core::builder::WorkflowBuilder::<serde_json::Value>::new()
            .start_with::<steps::WaitPodRunning>()
            .name("wait-pod-running")
            .then::<steps::ConfigureDatabaseEngine>()
            .name("configure-db")
            .then::<steps::SyncGiteaAdminPassword>()
            .name("sync-gitea")
            .then::<steps::PrintSeedOutputs>()
            .name("print-outputs")
            .end_workflow()
            .build("seed-skip-test", 1);
        host.register_workflow_definition(skip_def).await;

        let instance = run_workflow_sync(
            &host,
            "seed-skip-test",
            1,
            serde_json::json!({ "skip_seed": true }),
            Duration::from_secs(10),
        )
        .await
        .unwrap();

        assert_eq!(instance.status, WorkflowStatus::Complete);
        host.stop().await;
    }

    /// `print_summary` on a genuinely executed (skip-path) instance must not panic.
    #[tokio::test]
    async fn test_print_summary_with_completed_workflow() {
        let host = crate::workflows::host::create_test_host().await.unwrap();
        register(&host).await;

        let def = wfe_core::builder::WorkflowBuilder::<serde_json::Value>::new()
            .start_with::<steps::WaitPodRunning>()
            .name("wait-pod-running")
            .then::<steps::PrintSeedOutputs>()
            .name("print-seed-outputs")
            .end_workflow()
            .build("summary-test", 1);
        host.register_workflow_definition(def).await;

        let instance = run_workflow_sync(
            &host,
            "summary-test",
            1,
            serde_json::json!({ "skip_seed": true }),
            Duration::from_secs(5),
        )
        .await
        .unwrap();

        // Should not panic — just prints to stdout
        print_summary(&instance);
    }

    /// A two-step workflow yields exactly two execution pointers, and
    /// `print_summary` handles them regardless of whether names were set.
    #[tokio::test]
    async fn test_print_summary_handles_both_named_and_unnamed_steps() {
        let host = crate::workflows::host::create_test_host().await.unwrap();
        register(&host).await;

        let def = wfe_core::builder::WorkflowBuilder::<serde_json::Value>::new()
            .start_with::<steps::WaitPodRunning>()
            .name("wait-pod-running")
            .then::<steps::PrintSeedOutputs>()
            .name("print-seed-outputs")
            .end_workflow()
            .build("names-test", 1);
        host.register_workflow_definition(def).await;

        let instance = run_workflow_sync(
            &host,
            "names-test",
            1,
            serde_json::json!({ "skip_seed": true }),
            Duration::from_secs(5),
        )
        .await
        .unwrap();

        // print_summary should handle both named and unnamed steps gracefully
        print_summary(&instance);
        assert_eq!(instance.execution_pointers.len(), 2);
    }

    /// An unnamed pointer must fall back to the synthetic `step-<id>` label.
    #[tokio::test]
    async fn test_print_summary_with_missing_step_names() {
        // Construct a synthetic instance with no step names to exercise fallback
        let mut instance = wfe_core::models::WorkflowInstance::new("test", 1, serde_json::json!({}));
        let mut ep = wfe_core::models::ExecutionPointer::new(0);
        ep.step_name = None;
        ep.status = wfe_core::models::PointerStatus::Complete;
        ep.start_time = Some(chrono::Utc::now());
        ep.end_time = Some(chrono::Utc::now());
        instance.execution_pointers.push(ep);
        // Should not panic — uses "step-0" fallback
        print_summary(&instance);
    }

    /// Missing start/end timestamps must render as "-" rather than panic.
    #[tokio::test]
    async fn test_print_summary_with_missing_times() {
        let mut instance = wfe_core::models::WorkflowInstance::new("test", 1, serde_json::json!({}));
        let mut ep = wfe_core::models::ExecutionPointer::new(0);
        ep.step_name = Some("test-step".to_string());
        ep.status = wfe_core::models::PointerStatus::Complete;
        ep.start_time = None;
        ep.end_time = None;
        instance.execution_pointers.push(ep);
        // Should print "-" for duration
        print_summary(&instance);
    }
}
|
||||||
454
src/workflows/seed/steps/kratos_admin.rs
Normal file
454
src/workflows/seed/steps/kratos_admin.rs
Normal file
@@ -0,0 +1,454 @@
|
|||||||
|
//! Kratos admin identity steps: seed admin identity, print outputs.
|
||||||
|
|
||||||
|
use std::collections::HashMap;
|
||||||
|
|
||||||
|
use wfe_core::models::ExecutionResult;
|
||||||
|
use wfe_core::traits::{StepBody, StepExecutionContext};
|
||||||
|
|
||||||
|
use crate::error::SunbeamError;
|
||||||
|
use crate::kube as k;
|
||||||
|
use crate::openbao::BaoClient;
|
||||||
|
use crate::output::{ok, warn};
|
||||||
|
use crate::secrets::{self, KratosIdentity, KratosRecovery, ADMIN_USERNAME};
|
||||||
|
use crate::workflows::data::SeedData;
|
||||||
|
|
||||||
|
// ── Pure helpers (testable without K8s) ─────────────────────────────────────
|
||||||
|
|
||||||
|
/// Strip PEM header/footer lines and whitespace from a public key,
/// returning the raw base64 content.
///
/// Generalized from the previous hard-coded `PUBLIC KEY` / `RSA PUBLIC KEY`
/// pair: any armor marker line beginning with `-----` (e.g.
/// `-----BEGIN EC PUBLIC KEY-----`) is dropped, then the remaining base64
/// body is concatenated with all whitespace removed. Input without armor
/// lines passes through unchanged (modulo whitespace removal).
pub(crate) fn strip_pem_headers(pem: &str) -> String {
    pem.lines()
        // Drop BEGIN/END armor lines regardless of the key-type label.
        .filter(|line| !line.trim_start().starts_with("-----"))
        // Join the base64 body, removing embedded whitespace/newlines.
        .flat_map(str::split_whitespace)
        .collect()
}
|
||||||
|
|
||||||
|
/// Format a DKIM DNS TXT record from domain and base64-encoded public key.
///
/// Produces the `default._domainkey.<domain>` TXT line suitable for pasting
/// at a DNS registrar.
pub(crate) fn format_dkim_record(domain: &str, b64_key: &str) -> String {
    [
        "default._domainkey.",
        domain,
        " TXT \"v=DKIM1; k=rsa; p=",
        b64_key,
        "\"",
    ]
    .concat()
}
|
||||||
|
|
||||||
|
/// Build the admin email from the domain.
|
||||||
|
pub(crate) fn admin_email(domain: &str) -> String {
|
||||||
|
format!("{ADMIN_USERNAME}@{domain}")
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── SeedKratosAdminIdentity ─────────────────────────────────────────────────
|
||||||
|
|
||||||
|
/// Port-forward to Kratos, check/create admin identity, generate recovery code,
/// update OpenBao.
#[derive(Default)]
pub struct SeedKratosAdminIdentity;

#[async_trait::async_trait]
impl StepBody for SeedKratosAdminIdentity {
    async fn run(
        &mut self,
        ctx: &StepExecutionContext<'_>,
    ) -> wfe_core::Result<ExecutionResult> {
        // Deserialize the shared workflow data; a malformed payload is a hard error.
        let data: SeedData = serde_json::from_value(ctx.workflow.data.clone())
            .map_err(|e| wfe_core::WfeError::StepExecution(e.to_string()))?;

        // Skip path: earlier steps decided seeding is not possible/needed.
        if data.skip_seed {
            return Ok(ExecutionResult::next());
        }

        // Preconditions from earlier steps: an OpenBao pod name and a non-empty
        // root token. Missing either one means we silently advance the workflow.
        let ob_pod = match &data.ob_pod {
            Some(p) => p.clone(),
            None => return Ok(ExecutionResult::next()),
        };
        let root_token = match &data.root_token {
            Some(t) if !t.is_empty() => t.clone(),
            _ => return Ok(ExecutionResult::next()),
        };

        // The cluster domain determines the admin email; without it this step
        // degrades to a warning rather than failing the workflow.
        let domain = match k::get_domain().await {
            Ok(d) => d,
            Err(e) => {
                warn(&format!("Could not determine domain: {e}"));
                return Ok(ExecutionResult::next());
            }
        };
        let admin_email = admin_email(&domain);
        ok(&format!(
            "Ensuring Kratos admin identity ({admin_email})..."
        ));

        // Port-forward to OpenBao so we can patch the kratos-admin KV entry later.
        // A failed forward here is fatal for the step.
        let pf_bao = secrets::port_forward("data", &ob_pod, 8200).await
            .map_err(|e| wfe_core::WfeError::StepExecution(e.to_string()))?;
        let bao_url = format!("http://127.0.0.1:{}", pf_bao.local_port);
        let bao = BaoClient::with_token(&bao_url, &root_token);

        // All Kratos interaction happens inside this async block so any error
        // (`?` on reqwest/SunbaemError) is collected into `result` and handled
        // as a warning below instead of aborting the workflow.
        let result: std::result::Result<(String, String, String), SunbeamError> = async {
            // Prefer the dedicated kratos-admin service; fall back to the main
            // kratos service on its admin port (4434) if that forward fails.
            let pf = match secrets::port_forward_svc(
                "ory",
                "app.kubernetes.io/name=kratos-admin",
                80,
            )
            .await
            {
                Ok(pf) => pf,
                Err(_) => {
                    secrets::port_forward_svc("ory", "app.kubernetes.io/name=kratos", 4434)
                        .await
                        .map_err(|e| {
                            SunbeamError::Other(format!(
                                "Could not port-forward to Kratos admin API: {e}"
                            ))
                        })?
                }
            };
            let base = format!("http://127.0.0.1:{}", pf.local_port);
            // Give the port-forward a moment to become usable before the first request.
            tokio::time::sleep(std::time::Duration::from_secs(1)).await;

            let http = reqwest::Client::new();

            // Look up an existing identity by email (page_size=1: first hit wins).
            let resp = http
                .get(format!(
                    "{base}/admin/identities?credentials_identifier={admin_email}&page_size=1"
                ))
                .header("Accept", "application/json")
                .send()
                .await?;

            // An unparseable response is treated as "no identities found".
            let identities: Vec<KratosIdentity> = resp.json().await.unwrap_or_default();
            let identity_id = if let Some(existing) = identities.first() {
                // Only a prefix of the id is printed; `8.min(len)` guards short ids.
                ok(&format!(
                    " admin identity exists ({}...)",
                    &existing.id[..8.min(existing.id.len())]
                ));
                existing.id.clone()
            } else {
                // No identity yet — create one with the "employee" schema, active state.
                let resp = http
                    .post(format!("{base}/admin/identities"))
                    .header("Content-Type", "application/json")
                    .header("Accept", "application/json")
                    .json(&serde_json::json!({
                        "schema_id": "employee",
                        "traits": {"email": admin_email},
                        "state": "active",
                    }))
                    .send()
                    .await?;

                let identity: KratosIdentity = resp
                    .json()
                    .await
                    .map_err(|e| SunbeamError::Other(e.to_string()))?;
                ok(&format!(
                    " created admin identity ({}...)",
                    &identity.id[..8.min(identity.id.len())]
                ));
                identity.id
            };

            // Mint a 24h recovery code for the admin identity.
            let resp = http
                .post(format!("{base}/admin/recovery/code"))
                .header("Content-Type", "application/json")
                .header("Accept", "application/json")
                .json(&serde_json::json!({
                    "identity_id": identity_id,
                    "expires_in": "24h",
                }))
                .send()
                .await?;

            // A failed decode yields empty link/code; PrintSeedOutputs skips empties.
            let recovery: KratosRecovery = resp.json().await.unwrap_or(KratosRecovery {
                recovery_link: String::new(),
                recovery_code: String::new(),
            });

            // Record the admin email in OpenBao (best-effort: errors ignored).
            let mut patch_data = HashMap::new();
            patch_data.insert("admin-identity-ids".to_string(), admin_email.clone());
            let _ = bao.kv_patch("secret", "kratos-admin", &patch_data).await;
            ok(&format!(" ADMIN_IDENTITY_IDS set to {admin_email}"));

            Ok((recovery.recovery_link, recovery.recovery_code, identity_id))
        }
        .await;

        // Success: publish recovery data into the workflow for later steps.
        // Failure: warn and continue — Kratos may simply not be up yet.
        let mut output = ExecutionResult::next();
        match result {
            Ok((recovery_link, recovery_code, identity_id)) => {
                output.output_data = Some(serde_json::json!({
                    "recovery_link": recovery_link,
                    "recovery_code": recovery_code,
                    "admin_identity_id": identity_id,
                }));
            }
            Err(e) => {
                warn(&format!(
                    "Could not seed Kratos admin identity (Kratos may not be ready): {e}"
                ));
            }
        }

        Ok(output)
    }
}
|
||||||
|
|
||||||
|
// ── PrintSeedOutputs ────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
/// Print DKIM record and recovery link/code.
|
||||||
|
#[derive(Default)]
|
||||||
|
pub struct PrintSeedOutputs;
|
||||||
|
|
||||||
|
#[async_trait::async_trait]
|
||||||
|
impl StepBody for PrintSeedOutputs {
|
||||||
|
async fn run(
|
||||||
|
&mut self,
|
||||||
|
ctx: &StepExecutionContext<'_>,
|
||||||
|
) -> wfe_core::Result<ExecutionResult> {
|
||||||
|
let data: SeedData = serde_json::from_value(ctx.workflow.data.clone())
|
||||||
|
.map_err(|e| wfe_core::WfeError::StepExecution(e.to_string()))?;
|
||||||
|
|
||||||
|
if data.skip_seed {
|
||||||
|
ok("Seed skipped (OpenBao not available).");
|
||||||
|
return Ok(ExecutionResult::next());
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(ref link) = data.recovery_link {
|
||||||
|
if !link.is_empty() {
|
||||||
|
ok("Admin recovery link (valid 24h):");
|
||||||
|
println!(" {link}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if let Some(ref code) = data.recovery_code {
|
||||||
|
if !code.is_empty() {
|
||||||
|
ok("Admin recovery code (enter on the page above):");
|
||||||
|
println!(" {code}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let dkim_pub = data
|
||||||
|
.creds
|
||||||
|
.get("messages-dkim-public-key")
|
||||||
|
.cloned()
|
||||||
|
.unwrap_or_default();
|
||||||
|
if !dkim_pub.is_empty() {
|
||||||
|
let b64_key = strip_pem_headers(&dkim_pub);
|
||||||
|
|
||||||
|
if let Ok(domain) = k::get_domain().await {
|
||||||
|
ok("DKIM DNS record (add to DNS at your registrar):");
|
||||||
|
println!(" {}", format_dkim_record(&domain, &b64_key));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
ok("All secrets seeded.");
|
||||||
|
Ok(ExecutionResult::next())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;
    use std::time::Duration;
    use wfe::run_workflow_sync;
    use wfe_core::builder::WorkflowBuilder;
    use wfe_core::models::WorkflowStatus;

    /// Run a single step `S` as a one-step workflow against a test host,
    /// returning the finished instance. Used to exercise each step's
    /// early-return paths without any live cluster.
    async fn run_step<S: StepBody + Default + 'static>(
        data: serde_json::Value,
    ) -> wfe_core::models::WorkflowInstance {
        let host = crate::workflows::host::create_test_host().await.unwrap();
        host.register_step::<S>().await;
        let def = WorkflowBuilder::<serde_json::Value>::new()
            .start_with::<S>()
            .name("test-step")
            .end_workflow()
            .build("test-wf", 1);
        host.register_workflow_definition(def).await;
        let instance = run_workflow_sync(&host, "test-wf", 1, data, Duration::from_secs(5))
            .await
            .unwrap();
        host.stop().await;
        instance
    }

    // ── SeedKratosAdminIdentity ─────────────────────────────────────────
    // Each case below hits an early return before any K8s/HTTP call,
    // so the step must complete without cluster access.

    #[tokio::test]
    async fn test_seed_kratos_admin_skip_seed() {
        let data = serde_json::json!({ "skip_seed": true });
        let instance = run_step::<SeedKratosAdminIdentity>(data).await;
        assert_eq!(instance.status, WorkflowStatus::Complete);
    }

    #[tokio::test]
    async fn test_seed_kratos_admin_no_ob_pod() {
        let data = serde_json::json!({ "skip_seed": false });
        let instance = run_step::<SeedKratosAdminIdentity>(data).await;
        assert_eq!(instance.status, WorkflowStatus::Complete);
    }

    #[tokio::test]
    async fn test_seed_kratos_admin_null_ob_pod() {
        let data = serde_json::json!({ "skip_seed": false, "ob_pod": null });
        let instance = run_step::<SeedKratosAdminIdentity>(data).await;
        assert_eq!(instance.status, WorkflowStatus::Complete);
    }

    #[tokio::test]
    async fn test_seed_kratos_admin_no_root_token() {
        let data = serde_json::json!({ "skip_seed": false, "ob_pod": "openbao-0" });
        let instance = run_step::<SeedKratosAdminIdentity>(data).await;
        assert_eq!(instance.status, WorkflowStatus::Complete);
    }

    #[tokio::test]
    async fn test_seed_kratos_admin_empty_root_token() {
        // Empty token is treated the same as a missing one.
        let data = serde_json::json!({
            "skip_seed": false,
            "ob_pod": "openbao-0",
            "root_token": "",
        });
        let instance = run_step::<SeedKratosAdminIdentity>(data).await;
        assert_eq!(instance.status, WorkflowStatus::Complete);
    }

    // ── PrintSeedOutputs ────────────────────────────────────────────────

    #[tokio::test]
    async fn test_print_seed_outputs_skip_seed() {
        let data = serde_json::json!({ "skip_seed": true });
        let instance = run_step::<PrintSeedOutputs>(data).await;
        assert_eq!(instance.status, WorkflowStatus::Complete);
    }

    #[tokio::test]
    async fn test_print_seed_outputs_no_recovery_link() {
        let data = serde_json::json!({ "skip_seed": false });
        let instance = run_step::<PrintSeedOutputs>(data).await;
        assert_eq!(instance.status, WorkflowStatus::Complete);
    }

    #[tokio::test]
    async fn test_print_seed_outputs_with_recovery_data() {
        let data = serde_json::json!({
            "skip_seed": false,
            "recovery_link": "https://login.test.local/self-service/recovery?flow=abc",
            "recovery_code": "123456",
        });
        let instance = run_step::<PrintSeedOutputs>(data).await;
        assert_eq!(instance.status, WorkflowStatus::Complete);
    }

    #[tokio::test]
    async fn test_print_seed_outputs_with_empty_recovery() {
        // Empty strings must be silently skipped, not printed.
        let data = serde_json::json!({
            "skip_seed": false,
            "recovery_link": "",
            "recovery_code": "",
        });
        let instance = run_step::<PrintSeedOutputs>(data).await;
        assert_eq!(instance.status, WorkflowStatus::Complete);
    }

    // Note: PrintSeedOutputs with a non-empty DKIM key calls k::get_domain()
    // which requires a live kube cluster. The DKIM stripping logic is tested
    // separately in test_dkim_key_stripping below.

    #[tokio::test]
    async fn test_print_seed_outputs_empty_dkim_key() {
        let data = serde_json::json!({
            "skip_seed": false,
            "creds": { "messages-dkim-public-key": "" },
        });
        let instance = run_step::<PrintSeedOutputs>(data).await;
        assert_eq!(instance.status, WorkflowStatus::Complete);
    }

    // ── Data deserialization ────────────────────────────────────────────

    /// Recovery fields round-trip through SeedData deserialization.
    #[test]
    fn test_seed_data_recovery_fields() {
        let json = serde_json::json!({
            "skip_seed": false,
            "recovery_link": "https://example.com/recovery",
            "recovery_code": "abc123",
            "admin_identity_id": "id-uuid-here",
        });
        let data: SeedData = serde_json::from_value(json).unwrap();
        assert_eq!(data.recovery_link.as_deref(), Some("https://example.com/recovery"));
        assert_eq!(data.recovery_code.as_deref(), Some("abc123"));
        assert_eq!(data.admin_identity_id.as_deref(), Some("id-uuid-here"));
    }

    /// Recovery fields default to None when absent from the JSON payload.
    #[test]
    fn test_seed_data_recovery_fields_default_none() {
        let json = serde_json::json!({ "skip_seed": false });
        let data: SeedData = serde_json::from_value(json).unwrap();
        assert!(data.recovery_link.is_none());
        assert!(data.recovery_code.is_none());
        assert!(data.admin_identity_id.is_none());
    }

    #[test]
    fn test_dkim_key_stripping() {
        let raw = "-----BEGIN PUBLIC KEY-----\nMIIBIjAN\n-----END PUBLIC KEY-----";
        let b64_key = strip_pem_headers(raw);
        assert_eq!(b64_key, "MIIBIjAN");
    }

    // ── Pure helper tests ─────────────────────────────────────────────────

    #[test]
    fn test_strip_pem_headers_standard() {
        let pem = "-----BEGIN PUBLIC KEY-----\nMIIBIjAN\nBgkqhki\n-----END PUBLIC KEY-----";
        assert_eq!(strip_pem_headers(pem), "MIIBIjANBgkqhki");
    }

    #[test]
    fn test_strip_pem_headers_rsa_variant() {
        let pem = "-----BEGIN RSA PUBLIC KEY-----\nABC123\n-----END RSA PUBLIC KEY-----";
        assert_eq!(strip_pem_headers(pem), "ABC123");
    }

    #[test]
    fn test_strip_pem_headers_no_headers() {
        assert_eq!(strip_pem_headers("MIIBIjAN"), "MIIBIjAN");
    }

    #[test]
    fn test_strip_pem_headers_empty() {
        assert_eq!(strip_pem_headers(""), "");
    }

    #[test]
    fn test_strip_pem_headers_multiline() {
        let pem = "-----BEGIN PUBLIC KEY-----\nAAAA\nBBBB\nCCCC\n-----END PUBLIC KEY-----\n";
        assert_eq!(strip_pem_headers(pem), "AAAABBBBCCCC");
    }

    #[test]
    fn test_format_dkim_record() {
        let record = format_dkim_record("sunbeam.pt", "MIIBIjAN");
        assert_eq!(
            record,
            "default._domainkey.sunbeam.pt TXT \"v=DKIM1; k=rsa; p=MIIBIjAN\""
        );
    }

    #[test]
    fn test_format_dkim_record_different_domain() {
        let record = format_dkim_record("example.com", "ABC123");
        assert!(record.contains("example.com"));
        assert!(record.contains("ABC123"));
        assert!(record.starts_with("default._domainkey."));
    }

    #[test]
    fn test_admin_email() {
        let email = admin_email("sunbeam.pt");
        assert_eq!(email, format!("{}@sunbeam.pt", ADMIN_USERNAME));
    }

    #[test]
    fn test_admin_email_different_domain() {
        let email = admin_email("example.com");
        assert!(email.ends_with("@example.com"));
        assert!(email.starts_with(ADMIN_USERNAME));
    }
}
|
||||||
12
src/workflows/seed/steps/mod.rs
Normal file
12
src/workflows/seed/steps/mod.rs
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
//! Seed workflow steps — each module contains one or more WFE step structs.
|
||||||
|
|
||||||
|
pub mod k8s_secrets;
|
||||||
|
pub mod kratos_admin;
|
||||||
|
pub mod kv_seeding;
|
||||||
|
pub mod openbao_init;
|
||||||
|
pub mod postgres;
|
||||||
|
|
||||||
|
pub use k8s_secrets::SyncGiteaAdminPassword;
|
||||||
|
pub use kratos_admin::{PrintSeedOutputs, SeedKratosAdminIdentity};
|
||||||
|
pub use openbao_init::{FindOpenBaoPod, InitOrUnsealOpenBao, WaitPodRunning};
|
||||||
|
pub use postgres::{ConfigureDatabaseEngine, WaitForPostgres};
|
||||||
309
src/workflows/seed/steps/openbao_init.rs
Normal file
309
src/workflows/seed/steps/openbao_init.rs
Normal file
@@ -0,0 +1,309 @@
|
|||||||
|
//! OpenBao initialization steps: find pod, wait for Running, init/unseal, enable KV.
|
||||||
|
//!
|
||||||
|
//! These steps are data-struct-agnostic — they read/write individual JSON fields
|
||||||
|
//! rather than deserializing a full typed struct. This makes them reusable across
|
||||||
|
//! the `seed`, `up`, and `verify` workflows.
|
||||||
|
|
||||||
|
use std::collections::HashMap;
|
||||||
|
|
||||||
|
use wfe_core::models::ExecutionResult;
|
||||||
|
use wfe_core::traits::{StepBody, StepExecutionContext};
|
||||||
|
|
||||||
|
use crate::kube as k;
|
||||||
|
use crate::openbao::BaoClient;
|
||||||
|
use crate::output::{ok, warn};
|
||||||
|
use crate::secrets;
|
||||||
|
use crate::workflows::StepContext;
|
||||||
|
|
||||||
|
/// Shorthand for wrapping a message into `WfeError::StepExecution`,
/// used by every step in this module when propagating failures.
fn step_err(msg: impl Into<String>) -> wfe_core::WfeError {
    wfe_core::WfeError::StepExecution(msg.into())
}
|
||||||
|
|
||||||
|
// ── FindOpenBaoPod ──────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
/// Find the OpenBao server pod by label selector.
/// Reads `__ctx` to set kube context. Sets `ob_pod` or `skip_seed=true`.
#[derive(Default)]
pub struct FindOpenBaoPod;

#[async_trait::async_trait]
impl StepBody for FindOpenBaoPod {
    async fn run(
        &mut self,
        ctx: &StepExecutionContext<'_>,
    ) -> wfe_core::Result<ExecutionResult> {
        // Workflow-level context (kube context + SSH host); missing `__ctx`
        // falls back to the default value before deserialization.
        let step_ctx: StepContext = serde_json::from_value(
            ctx.workflow.data.get("__ctx").cloned().unwrap_or_default()
        ).map_err(|e| step_err(e.to_string()))?;

        k::set_context(&step_ctx.kube_context, &step_ctx.ssh_host);

        // List OpenBao server pods in the "data" namespace by label selector.
        let client = k::get_client().await.map_err(|e| step_err(e.to_string()))?;
        let pods: kube::Api<k8s_openapi::api::core::v1::Pod> =
            kube::Api::namespaced(client.clone(), "data");
        let lp = kube::api::ListParams::default()
            .labels("app.kubernetes.io/name=openbao,component=server");
        let pod_list = pods.list(&lp).await.map_err(|e| step_err(e.to_string()))?;

        // Only the first matching pod is used.
        let ob_pod = pod_list
            .items
            .first()
            .and_then(|p| p.metadata.name.as_deref());

        // Publish either the discovered pod name, or the skip flag that makes
        // every downstream seed step a no-op.
        let mut result = ExecutionResult::next();
        match ob_pod {
            Some(name) => {
                ok(&format!("OpenBao ({name})..."));
                result.output_data = Some(serde_json::json!({ "ob_pod": name }));
            }
            None => {
                ok("OpenBao pod not found -- skipping.");
                result.output_data = Some(serde_json::json!({ "skip_seed": true }));
            }
        }

        Ok(result)
    }
}
|
||||||
|
|
||||||
|
// ── WaitPodRunning ─────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
/// Wait for the OpenBao pod to reach Running state (up to 5 min).
|
||||||
|
/// Reads `ob_pod`, `skip_seed`. No-op if skip_seed or ob_pod is absent.
|
||||||
|
#[derive(Default)]
|
||||||
|
pub struct WaitPodRunning;
|
||||||
|
|
||||||
|
#[async_trait::async_trait]
|
||||||
|
impl StepBody for WaitPodRunning {
|
||||||
|
async fn run(
|
||||||
|
&mut self,
|
||||||
|
ctx: &StepExecutionContext<'_>,
|
||||||
|
) -> wfe_core::Result<ExecutionResult> {
|
||||||
|
if ctx.workflow.data.get("skip_seed").and_then(|v| v.as_bool()).unwrap_or(false) {
|
||||||
|
return Ok(ExecutionResult::next());
|
||||||
|
}
|
||||||
|
|
||||||
|
let ob_pod = match ctx.workflow.data.get("ob_pod").and_then(|v| v.as_str()) {
|
||||||
|
Some(p) => p.to_string(),
|
||||||
|
None => return Ok(ExecutionResult::next()),
|
||||||
|
};
|
||||||
|
|
||||||
|
let _ = secrets::wait_pod_running("data", &ob_pod, 300).await;
|
||||||
|
|
||||||
|
Ok(ExecutionResult::next())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── InitOrUnsealOpenBao ─────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
/// Port-forward to OpenBao, check seal status, init if needed (storing keys
|
||||||
|
/// in K8s secret), unseal if needed, enable KV engine.
|
||||||
|
/// Reads `ob_pod`, `skip_seed`. Sets `ob_port`, `root_token`, or `skip_seed`.
|
||||||
|
#[derive(Default)]
|
||||||
|
pub struct InitOrUnsealOpenBao;
|
||||||
|
|
||||||
|
#[async_trait::async_trait]
|
||||||
|
impl StepBody for InitOrUnsealOpenBao {
|
||||||
|
async fn run(
|
||||||
|
&mut self,
|
||||||
|
ctx: &StepExecutionContext<'_>,
|
||||||
|
) -> wfe_core::Result<ExecutionResult> {
|
||||||
|
if ctx.workflow.data.get("skip_seed").and_then(|v| v.as_bool()).unwrap_or(false) {
|
||||||
|
return Ok(ExecutionResult::next());
|
||||||
|
}
|
||||||
|
|
||||||
|
let ob_pod = match ctx.workflow.data.get("ob_pod").and_then(|v| v.as_str()) {
|
||||||
|
Some(p) => p.to_string(),
|
||||||
|
None => return Ok(ExecutionResult::next()),
|
||||||
|
};
|
||||||
|
|
||||||
|
// Port-forward with retries
|
||||||
|
let mut pf = None;
|
||||||
|
for attempt in 0..10 {
|
||||||
|
match secrets::port_forward("data", &ob_pod, 8200).await {
|
||||||
|
Ok(p) => { pf = Some(p); break; }
|
||||||
|
Err(e) => {
|
||||||
|
if attempt < 9 {
|
||||||
|
ok(&format!("Waiting for OpenBao to accept connections (attempt {})...", attempt + 1));
|
||||||
|
tokio::time::sleep(std::time::Duration::from_secs(5)).await;
|
||||||
|
} else {
|
||||||
|
return Err(step_err(format!(
|
||||||
|
"Port-forward to OpenBao failed after 10 attempts: {e}"
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let pf = pf.unwrap();
|
||||||
|
let bao_url = format!("http://127.0.0.1:{}", pf.local_port);
|
||||||
|
let bao = BaoClient::new(&bao_url);
|
||||||
|
|
||||||
|
// Wait for API to respond
|
||||||
|
let mut status = None;
|
||||||
|
for attempt in 0..30 {
|
||||||
|
match bao.seal_status().await {
|
||||||
|
Ok(s) => { status = Some(s); break; }
|
||||||
|
Err(_) if attempt < 29 => {
|
||||||
|
tokio::time::sleep(std::time::Duration::from_secs(3)).await;
|
||||||
|
}
|
||||||
|
Err(_) => {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut unseal_key = String::new();
|
||||||
|
let mut root_token = String::new();
|
||||||
|
|
||||||
|
let status = status.unwrap_or_else(|| crate::openbao::SealStatusResponse {
|
||||||
|
initialized: false, sealed: true, progress: 0, t: 0, n: 0,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Check if truly initialized (not just a placeholder secret)
|
||||||
|
let mut already_initialized = status.initialized;
|
||||||
|
if !already_initialized {
|
||||||
|
if let Ok(key) = k::kube_get_secret_field("data", "openbao-keys", "key").await {
|
||||||
|
if !key.is_empty() && key != "placeholder" {
|
||||||
|
already_initialized = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if !already_initialized {
|
||||||
|
ok("Initializing OpenBao...");
|
||||||
|
match bao.init(1, 1).await {
|
||||||
|
Ok(init) => {
|
||||||
|
unseal_key = init.unseal_keys_b64[0].clone();
|
||||||
|
root_token = init.root_token.clone();
|
||||||
|
let mut secret_data = HashMap::new();
|
||||||
|
secret_data.insert("key".to_string(), unseal_key.clone());
|
||||||
|
secret_data.insert("root-token".to_string(), root_token.clone());
|
||||||
|
k::create_secret("data", "openbao-keys", secret_data).await
|
||||||
|
.map_err(|e| step_err(e.to_string()))?;
|
||||||
|
ok("Initialized -- keys stored in secret/openbao-keys.");
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
warn(&format!("Init failed -- resetting OpenBao storage... ({e})"));
|
||||||
|
let _ = secrets::delete_resource("data", "pvc", "data-openbao-0").await;
|
||||||
|
let _ = secrets::delete_resource("data", "pod", &ob_pod).await;
|
||||||
|
warn("OpenBao storage reset. Run again after the pod restarts.");
|
||||||
|
let mut result = ExecutionResult::next();
|
||||||
|
result.output_data = Some(serde_json::json!({ "skip_seed": true }));
|
||||||
|
return Ok(result);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
ok("Already initialized.");
|
||||||
|
if let Ok(key) = k::kube_get_secret_field("data", "openbao-keys", "key").await {
|
||||||
|
if key != "placeholder" { unseal_key = key; }
|
||||||
|
}
|
||||||
|
if let Ok(token) = k::kube_get_secret_field("data", "openbao-keys", "root-token").await {
|
||||||
|
if token != "placeholder" { root_token = token; }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Unseal if needed
|
||||||
|
let status = bao.seal_status().await.unwrap_or_else(|_| {
|
||||||
|
crate::openbao::SealStatusResponse {
|
||||||
|
initialized: true, sealed: true, progress: 0, t: 0, n: 0,
|
||||||
|
}
|
||||||
|
});
|
||||||
|
if status.sealed && !unseal_key.is_empty() {
|
||||||
|
ok("Unsealing...");
|
||||||
|
bao.unseal(&unseal_key).await
|
||||||
|
.map_err(|e| step_err(format!("Failed to unseal OpenBao: {e}")))?;
|
||||||
|
}
|
||||||
|
|
||||||
|
if root_token.is_empty() {
|
||||||
|
warn("No root token available -- skipping vault operations.");
|
||||||
|
let mut result = ExecutionResult::next();
|
||||||
|
result.output_data = Some(serde_json::json!({ "skip_seed": true }));
|
||||||
|
return Ok(result);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Enable & tune KV engine
|
||||||
|
let bao = BaoClient::with_token(&bao_url, &root_token);
|
||||||
|
ok("Enabling KV engine...");
|
||||||
|
let _ = bao.enable_secrets_engine("secret", "kv").await;
|
||||||
|
let _ = bao
|
||||||
|
.write(
|
||||||
|
"sys/mounts/secret/tune",
|
||||||
|
&serde_json::json!({"options": {"version": "2"}}),
|
||||||
|
)
|
||||||
|
.await;
|
||||||
|
|
||||||
|
let mut result = ExecutionResult::next();
|
||||||
|
result.output_data = Some(serde_json::json!({
|
||||||
|
"ob_port": pf.local_port,
|
||||||
|
"root_token": root_token,
|
||||||
|
}));
|
||||||
|
Ok(result)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;
    use std::time::Duration;
    use wfe::run_workflow_sync;
    use wfe_core::builder::WorkflowBuilder;
    use wfe_core::models::WorkflowStatus;

    /// Run a single step `S` as a one-step workflow against a throwaway test
    /// host, returning the finished instance for status assertions.
    ///
    /// `data` is the initial workflow JSON the step reads from
    /// `ctx.workflow.data`.
    async fn run_step<S: StepBody + Default + 'static>(
        data: serde_json::Value,
    ) -> wfe_core::models::WorkflowInstance {
        let host = crate::workflows::host::create_test_host().await.unwrap();
        host.register_step::<S>().await;
        // One-step definition wrapping S; version 1 is arbitrary for tests.
        let def = WorkflowBuilder::<serde_json::Value>::new()
            .start_with::<S>()
                .name("test-step")
            .end_workflow()
            .build("test-wf", 1);
        host.register_workflow_definition(def).await;
        // 5s timeout keeps a hung step from stalling the test suite.
        let instance = run_workflow_sync(&host, "test-wf", 1, data, Duration::from_secs(5))
            .await
            .unwrap();
        host.stop().await;
        instance
    }

    // WaitPodRunning: skip_seed=true must short-circuit to completion.
    #[tokio::test]
    async fn test_wait_pod_running_skip_seed() {
        let data = serde_json::json!({ "skip_seed": true });
        let instance = run_step::<WaitPodRunning>(data).await;
        assert_eq!(instance.status, WorkflowStatus::Complete);
    }

    // WaitPodRunning: absent ob_pod key is a no-op, not an error.
    #[tokio::test]
    async fn test_wait_pod_running_no_ob_pod() {
        let data = serde_json::json!({ "skip_seed": false });
        let instance = run_step::<WaitPodRunning>(data).await;
        assert_eq!(instance.status, WorkflowStatus::Complete);
    }

    // WaitPodRunning: an explicit JSON null ob_pod behaves like an absent key.
    #[tokio::test]
    async fn test_wait_pod_running_ob_pod_none_explicit() {
        let data = serde_json::json!({ "skip_seed": false, "ob_pod": null });
        let instance = run_step::<WaitPodRunning>(data).await;
        assert_eq!(instance.status, WorkflowStatus::Complete);
    }

    // InitOrUnsealOpenBao: skip_seed=true must short-circuit to completion.
    #[tokio::test]
    async fn test_init_or_unseal_skip_seed() {
        let data = serde_json::json!({ "skip_seed": true });
        let instance = run_step::<InitOrUnsealOpenBao>(data).await;
        assert_eq!(instance.status, WorkflowStatus::Complete);
    }

    // InitOrUnsealOpenBao: absent ob_pod key is a no-op, not an error.
    #[tokio::test]
    async fn test_init_or_unseal_no_ob_pod() {
        let data = serde_json::json!({ "skip_seed": false });
        let instance = run_step::<InitOrUnsealOpenBao>(data).await;
        assert_eq!(instance.status, WorkflowStatus::Complete);
    }

    // InitOrUnsealOpenBao: explicit JSON null ob_pod behaves like an absent key.
    #[tokio::test]
    async fn test_init_or_unseal_ob_pod_none() {
        let data = serde_json::json!({ "skip_seed": false, "ob_pod": null });
        let instance = run_step::<InitOrUnsealOpenBao>(data).await;
        assert_eq!(instance.status, WorkflowStatus::Complete);
    }
}
|
||||||
388
src/workflows/up/definition.rs
Normal file
388
src/workflows/up/definition.rs
Normal file
@@ -0,0 +1,388 @@
|
|||||||
|
//! Up workflow definition — phased deployment with parallel branches.
|
||||||
|
|
||||||
|
use serde_json::json;
|
||||||
|
use wfe_core::builder::WorkflowBuilder;
|
||||||
|
use wfe_core::models::WorkflowDefinition;
|
||||||
|
|
||||||
|
use super::steps;
|
||||||
|
use crate::workflows::primitives::{
|
||||||
|
ApplyManifest, CollectCredentials, CreateK8sSecret, CreatePGDatabase, CreatePGRole,
|
||||||
|
EnableVaultAuth, EnsureNamespace, EnsureOpenSearchML, InjectOpenSearchModelId,
|
||||||
|
SeedKVPath, WaitForRollout, WriteKVPath,
|
||||||
|
WriteVaultAuthConfig, WriteVaultPolicy, WriteVaultRole,
|
||||||
|
};
|
||||||
|
use crate::workflows::primitives::kv_service_configs;
|
||||||
|
use crate::workflows::seed::steps::postgres::pg_db_map;
|
||||||
|
|
||||||
|
/// Build the up workflow definition.
///
/// Phases run in sequence; within a phase, independent work runs as parallel
/// branches. Dependencies inside a branch are wired with `wire_outcome`.
/// Returns the definition registered under id `"up"`, version 2.
pub fn build() -> WorkflowDefinition {
    WorkflowBuilder::<serde_json::Value>::new()
        // ── Phase 1: Infrastructure ────────────────────────────────────
        .start_with::<steps::EnsureCilium>()
            .name("ensure-cilium")

        // Storage and monitoring manifests are independent of each other.
        .parallel(|p| p
            .branch(|b| {
                b.add_step_typed::<ApplyManifest>("apply-longhorn",
                    Some(json!({"namespace": "longhorn-system"})));
            })
            .branch(|b| {
                b.add_step_typed::<ApplyManifest>("apply-monitoring",
                    Some(json!({"namespace": "monitoring"})));
            })
        )

        .then::<ApplyManifest>()
            .name("apply-data")
            .config(json!({"namespace": "data"}))

        .parallel(|p| p
            .branch(|b| {
                b.add_step_typed::<steps::EnsureBuildKit>("ensure-buildkit", None);
            })
            .branch(|b| {
                // TLS chain: cert -> secret -> cert-manager, in that order.
                let id0 = b.add_step_typed::<steps::EnsureTLSCert>("ensure-tls-cert", None);
                let id1 = b.add_step_typed::<steps::EnsureTLSSecret>("ensure-tls-secret", None);
                let id2 = b.add_step_typed::<ApplyManifest>("apply-cert-manager",
                    Some(json!({"namespace": "cert-manager"})));
                b.wire_outcome(id0, id1, None);
                b.wire_outcome(id1, id2, None);
            })
        )

        // ── Phase 2: OpenBao init (sequential) ────────────────────────
        .then::<steps::FindOpenBaoPod>()
            .name("find-openbao-pod")
        .then::<steps::WaitPodRunning>()
            .name("wait-pod-running")
        .then::<steps::InitOrUnsealOpenBao>()
            .name("init-or-unseal-openbao")

        // ── Phase 3: KV seeding (parallel per-service) ────────────────
        .parallel(|p| {
            let mut p = p;
            // One seed->write branch per service config.
            for cfg in kv_service_configs::all_service_configs() {
                let service = cfg["service"].as_str().unwrap().to_string();
                p = p.branch(|b| {
                    let seed_id = b.add_step_typed::<SeedKVPath>(
                        &format!("seed-{service}"), Some(cfg.clone()));
                    let write_id = b.add_step_typed::<WriteKVPath>(
                        &format!("write-{service}"), Some(json!({"service": &service})));
                    b.wire_outcome(seed_id, write_id, None);
                });
            }
            // kratos-admin depends on seaweedfs (from_creds reference)
            p.branch(|b| {
                let seed_id = b.add_step_typed::<SeedKVPath>(
                    "seed-kratos-admin", Some(kv_service_configs::kratos_admin_config()));
                let write_id = b.add_step_typed::<WriteKVPath>(
                    "write-kratos-admin", Some(json!({"service": "kratos-admin"})));
                b.wire_outcome(seed_id, write_id, None);
            })
        })
        .then::<CollectCredentials>()
            .name("collect-credentials")
        // ── Phase 3b: Vault auth (4 atomic steps) ──────────────────
        .then::<EnableVaultAuth>()
            .name("enable-k8s-auth")
            .config(json!({"mount": "kubernetes", "type": "kubernetes"}))
        .then::<WriteVaultAuthConfig>()
            .name("write-k8s-auth-config")
            .config(json!({"mount": "kubernetes", "config": {
                "kubernetes_host": "https://kubernetes.default.svc.cluster.local"
            }}))
        .then::<WriteVaultPolicy>()
            .name("write-vso-policy")
            .config(json!({"name": "vso-reader", "hcl": concat!(
                "path \"secret/data/*\" { capabilities = [\"read\"] }\n",
                "path \"secret/metadata/*\" { capabilities = [\"read\", \"list\"] }\n",
                "path \"database/static-creds/*\" { capabilities = [\"read\"] }\n",
            )}))
        .then::<WriteVaultRole>()
            .name("write-vso-role")
            .config(json!({"mount": "kubernetes", "role": "vso", "config": {
                "bound_service_account_names": "default",
                "bound_service_account_namespaces": "ory,devtools,storage,lasuite,stalwart,matrix,media,data,monitoring,cert-manager",
                "policies": "vso-reader",
                "ttl": "1h"
            }}))

        // ── Phase 4: PostgreSQL ───────────────────────────────────────
        .then::<steps::WaitForPostgres>()
            .name("wait-for-postgres")

        .parallel(|p| {
            let db_map = pg_db_map();
            let mut p = p;
            // One role->database branch per (user, db) pair.
            for (user, db) in &db_map {
                p = p.branch(|b| {
                    let role_id = b.add_step_typed::<CreatePGRole>(
                        &format!("pg-role-{user}"),
                        Some(json!({"username": user})),
                    );
                    let db_id = b.add_step_typed::<CreatePGDatabase>(
                        &format!("pg-db-{db}"),
                        Some(json!({"dbname": db, "owner": user})),
                    );
                    b.wire_outcome(role_id, db_id, None);
                });
            }
            p
        })

        .then::<steps::ConfigureDatabaseEngine>()
            .name("configure-database-engine")

        // ── Phase 5: Platform manifests ───────────────────────────────
        .then::<ApplyManifest>()
            .name("apply-vso")
            .config(json!({"namespace": "vault-secrets-operator"}))

        .parallel(|p| p
            .branch(|b| {
                b.add_step_typed::<ApplyManifest>("apply-ingress",
                    Some(json!({"namespace": "ingress"})));
            })
            .branch(|b| {
                b.add_step_typed::<ApplyManifest>("apply-ory",
                    Some(json!({"namespace": "ory"})));
            })
            .branch(|b| {
                b.add_step_typed::<ApplyManifest>("apply-devtools",
                    Some(json!({"namespace": "devtools"})));
            })
            .branch(|b| {
                b.add_step_typed::<ApplyManifest>("apply-storage",
                    Some(json!({"namespace": "storage"})));
            })
            .branch(|b| {
                b.add_step_typed::<ApplyManifest>("apply-media",
                    Some(json!({"namespace": "media"})));
            })
        )

        // ── Phase 6: K8s secrets (parallel by namespace) ──────────────
        .parallel(|p| p
            .branch(|b| {
                let ns = b.add_step_typed::<EnsureNamespace>("ensure-ns-ory",
                    Some(json!({"namespace": "ory"})));
                let s1 = b.add_step_typed::<CreateK8sSecret>("secret-hydra",
                    Some(json!({"namespace":"ory","name":"hydra","data":{
                        "secretsSystem":"hydra-system-secret",
                        "secretsCookie":"hydra-cookie-secret",
                        "pairwise-salt":"hydra-pairwise-salt"
                    }})));
                let s2 = b.add_step_typed::<CreateK8sSecret>("secret-kratos-app",
                    Some(json!({"namespace":"ory","name":"kratos-app-secrets","data":{
                        "secretsDefault":"kratos-secrets-default",
                        "secretsCookie":"kratos-secrets-cookie"
                    }})));
                b.wire_outcome(ns, s1, None);
                b.wire_outcome(s1, s2, None);
            })
            .branch(|b| {
                let ns = b.add_step_typed::<EnsureNamespace>("ensure-ns-devtools",
                    Some(json!({"namespace": "devtools"})));
                let s1 = b.add_step_typed::<CreateK8sSecret>("secret-gitea-s3",
                    Some(json!({"namespace":"devtools","name":"gitea-s3-credentials","data":{
                        "access-key":"s3-access-key",
                        "secret-key":"s3-secret-key"
                    }})));
                let s2 = b.add_step_typed::<CreateK8sSecret>("secret-gitea-admin",
                    Some(json!({"namespace":"devtools","name":"gitea-admin-credentials","data":{
                        "username":"literal:gitea_admin",
                        "password":"gitea-admin-password"
                    }})));
                b.wire_outcome(ns, s1, None);
                b.wire_outcome(s1, s2, None);
            })
            .branch(|b| {
                let ns = b.add_step_typed::<EnsureNamespace>("ensure-ns-storage",
                    Some(json!({"namespace": "storage"})));
                let s1 = b.add_step_typed::<CreateK8sSecret>("secret-seaweedfs-s3-creds",
                    Some(json!({"namespace":"storage","name":"seaweedfs-s3-credentials","data":{
                        "S3_ACCESS_KEY":"s3-access-key",
                        "S3_SECRET_KEY":"s3-secret-key"
                    }})));
                let s2 = b.add_step_typed::<CreateK8sSecret>("secret-seaweedfs-s3-json",
                    Some(json!({"namespace":"storage","name":"seaweedfs-s3-json","data":{
                        "s3.json":"s3_json"
                    }})));
                b.wire_outcome(ns, s1, None);
                b.wire_outcome(s1, s2, None);
            })
            .branch(|b| {
                let ns = b.add_step_typed::<EnsureNamespace>("ensure-ns-lasuite",
                    Some(json!({"namespace": "lasuite"})));
                let s1 = b.add_step_typed::<CreateK8sSecret>("secret-lasuite-s3",
                    Some(json!({"namespace":"lasuite","name":"seaweedfs-s3-credentials","data":{
                        "S3_ACCESS_KEY":"s3-access-key",
                        "S3_SECRET_KEY":"s3-secret-key"
                    }})));
                let s2 = b.add_step_typed::<CreateK8sSecret>("secret-hive-oidc",
                    Some(json!({"namespace":"lasuite","name":"hive-oidc","data":{
                        "client-id":"hive-oidc-client-id",
                        "client-secret":"hive-oidc-client-secret"
                    }})));
                let s3 = b.add_step_typed::<CreateK8sSecret>("secret-people-django",
                    Some(json!({"namespace":"lasuite","name":"people-django-secret","data":{
                        "DJANGO_SECRET_KEY":"people-django-secret"
                    }})));
                b.wire_outcome(ns, s1, None);
                b.wire_outcome(s1, s2, None);
                b.wire_outcome(s2, s3, None);
            })
            .branch(|b| {
                b.add_step_typed::<EnsureNamespace>("ensure-ns-matrix",
                    Some(json!({"namespace": "matrix"})));
            })
            .branch(|b| {
                b.add_step_typed::<EnsureNamespace>("ensure-ns-media",
                    Some(json!({"namespace": "media"})));
            })
            .branch(|b| {
                b.add_step_typed::<EnsureNamespace>("ensure-ns-monitoring",
                    Some(json!({"namespace": "monitoring"})));
            })
        )

        .then::<steps::SyncGiteaAdminPassword>()
            .name("sync-gitea-admin-password")
        .then::<steps::BootstrapGitea>()
            .name("bootstrap-gitea")

        // ── Phase 7: Application manifests ────────────────────────────
        .parallel(|p| p
            .branch(|b| {
                b.add_step_typed::<ApplyManifest>("apply-lasuite",
                    Some(json!({"namespace": "lasuite"})));
            })
            .branch(|b| {
                b.add_step_typed::<ApplyManifest>("apply-matrix",
                    Some(json!({"namespace": "matrix"})));
            })
        )

        // ── Phase 8: Core rollouts + OpenSearch ML (parallel) ─────────
        .parallel(|p| p
            .branch(|b| {
                b.add_step_typed::<WaitForRollout>("wait-valkey",
                    Some(json!({"namespace": "data", "deployment": "valkey", "timeout_secs": 120})));
            })
            .branch(|b| {
                b.add_step_typed::<WaitForRollout>("wait-kratos",
                    Some(json!({"namespace": "ory", "deployment": "kratos", "timeout_secs": 120})));
            })
            .branch(|b| {
                b.add_step_typed::<WaitForRollout>("wait-hydra",
                    Some(json!({"namespace": "ory", "deployment": "hydra", "timeout_secs": 120})));
            })
            .branch(|b| {
                // OpenSearch ML model download/deploy — can take 10+ min on first run.
                // Runs alongside rollout waits so it doesn't block the pipeline.
                b.add_step_typed::<EnsureOpenSearchML>("ensure-opensearch-ml", None);
            })
        )

        .then::<InjectOpenSearchModelId>()
            .name("inject-opensearch-model-id")
        .then::<steps::PrintURLs>()
            .name("print-urls")
        .end_workflow()
        .build("up", 2)
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;

    // Sanity-check id/version and that the definition is non-trivial.
    #[test]
    fn test_build_returns_valid_definition() {
        let def = build();
        assert_eq!(def.id, "up");
        assert_eq!(def.version, 2);
        assert!(def.steps.len() > 20, "expected >20 steps, got {}", def.steps.len());
    }

    // Phase 1 entry point must stay first — later steps depend on networking.
    #[test]
    fn test_first_step_is_ensure_cilium() {
        let def = build();
        assert_eq!(def.steps[0].name, Some("ensure-cilium".into()));
        assert!(def.steps[0].step_type.contains("EnsureCilium"));
    }

    // The summary/URL printout is the terminal step of the pipeline.
    #[test]
    fn test_last_step_is_print_urls() {
        let def = build();
        let last = def.steps.last().unwrap();
        assert_eq!(last.name, Some("print-urls".into()));
        assert!(last.step_type.contains("PrintURLs"));
    }

    // Every ApplyManifest needs a namespace in its config to know what to apply.
    #[test]
    fn test_apply_manifest_steps_have_config() {
        let def = build();
        let apply_steps: Vec<_> = def.steps.iter()
            .filter(|s| s.step_type.contains("ApplyManifest"))
            .collect();
        assert!(!apply_steps.is_empty(), "should have ApplyManifest steps");
        for s in &apply_steps {
            let config = s.step_config.as_ref()
                .unwrap_or_else(|| panic!("ApplyManifest step {:?} missing config", s.name));
            assert!(config.get("namespace").is_some(),
                "ApplyManifest step {:?} missing namespace in config", s.name);
        }
    }

    // Phase 8 has exactly three rollout waits (valkey, kratos, hydra), each
    // fully configured.
    #[test]
    fn test_wait_for_rollout_steps_have_config() {
        let def = build();
        let rollout_steps: Vec<_> = def.steps.iter()
            .filter(|s| s.step_type.contains("WaitForRollout"))
            .collect();
        assert_eq!(rollout_steps.len(), 3, "should have 3 WaitForRollout steps");
        for s in &rollout_steps {
            let config = s.step_config.as_ref().unwrap();
            assert!(config.get("namespace").is_some());
            assert!(config.get("deployment").is_some());
        }
    }

    // .parallel() emits SequenceStep containers; each must hold branches.
    #[test]
    fn test_has_parallel_containers() {
        let def = build();
        let seq_steps: Vec<_> = def.steps.iter()
            .filter(|s| s.step_type.contains("SequenceStep"))
            .collect();
        assert!(seq_steps.len() >= 4, "expected >=4 parallel blocks, got {}", seq_steps.len());
        for s in &seq_steps {
            assert!(!s.children.is_empty(), "parallel container should have children");
        }
    }

    // Names are required for the workflow summary printout to be readable.
    #[test]
    fn test_non_container_steps_have_names() {
        let def = build();
        for s in &def.steps {
            // SequenceStep containers are auto-generated by .parallel()
            if s.step_type.contains("SequenceStep") {
                continue;
            }
            assert!(s.name.is_some(), "step {} ({}) has no name", s.id, s.step_type);
        }
    }

    // The TLS branch wires cert -> secret -> cert-manager; verify the chain
    // survived the builder.
    #[test]
    fn test_cert_branch_has_chained_outcomes() {
        let def = build();
        let tls_cert = def.steps.iter()
            .find(|s| s.name.as_deref() == Some("ensure-tls-cert"))
            .expect("should have ensure-tls-cert step");
        assert!(!tls_cert.outcomes.is_empty(), "ensure-tls-cert should wire to ensure-tls-secret");

        let tls_secret = def.steps.iter()
            .find(|s| s.name.as_deref() == Some("ensure-tls-secret"))
            .expect("should have ensure-tls-secret step");
        assert!(!tls_secret.outcomes.is_empty(), "ensure-tls-secret should wire to apply-cert-manager");
    }
}
|
||||||
106
src/workflows/up/mod.rs
Normal file
106
src/workflows/up/mod.rs
Normal file
@@ -0,0 +1,106 @@
|
|||||||
|
//! Up workflow — orchestrates full cluster bring-up as composable steps.
|
||||||
|
|
||||||
|
pub mod definition;
|
||||||
|
pub mod steps;
|
||||||
|
|
||||||
|
use crate::output;
|
||||||
|
|
||||||
|
/// Register all up workflow steps and the workflow definition with a host.
///
/// Must be called before starting the `"up"` workflow: the engine resolves
/// step types by name at execution time, so every step referenced by
/// [`definition::build`] has to be registered here first.
pub async fn register(host: &wfe::WorkflowHost) {
    // Primitive steps (config-driven, reusable)
    host.register_step::<crate::workflows::primitives::ApplyManifest>().await;
    host.register_step::<crate::workflows::primitives::WaitForRollout>().await;
    host.register_step::<crate::workflows::primitives::CreatePGRole>().await;
    host.register_step::<crate::workflows::primitives::CreatePGDatabase>().await;
    host.register_step::<crate::workflows::primitives::EnsureNamespace>().await;
    host.register_step::<crate::workflows::primitives::CreateK8sSecret>().await;
    host.register_step::<crate::workflows::primitives::EnableVaultAuth>().await;
    host.register_step::<crate::workflows::primitives::WriteVaultAuthConfig>().await;
    host.register_step::<crate::workflows::primitives::WriteVaultPolicy>().await;
    host.register_step::<crate::workflows::primitives::WriteVaultRole>().await;
    host.register_step::<crate::workflows::primitives::SeedKVPath>().await;
    host.register_step::<crate::workflows::primitives::WriteKVPath>().await;
    host.register_step::<crate::workflows::primitives::CollectCredentials>().await;
    host.register_step::<crate::workflows::primitives::EnsureOpenSearchML>().await;
    host.register_step::<crate::workflows::primitives::InjectOpenSearchModelId>().await;

    // Steps unique to up
    host.register_step::<steps::EnsureCilium>().await;
    host.register_step::<steps::EnsureBuildKit>().await;
    host.register_step::<steps::EnsureTLSCert>().await;
    host.register_step::<steps::EnsureTLSSecret>().await;
    host.register_step::<steps::BootstrapGitea>().await;
    host.register_step::<steps::PrintURLs>().await;

    // Steps shared from seed workflow
    host.register_step::<steps::FindOpenBaoPod>().await;
    host.register_step::<steps::WaitPodRunning>().await;
    host.register_step::<steps::InitOrUnsealOpenBao>().await;
    host.register_step::<steps::WaitForPostgres>().await;
    host.register_step::<steps::ConfigureDatabaseEngine>().await;
    host.register_step::<steps::SyncGiteaAdminPassword>().await;

    // Register workflow definition
    host.register_workflow_definition(definition::build()).await;
}
|
||||||
|
|
||||||
|
/// Print a summary of the completed up workflow.
|
||||||
|
pub fn print_summary(instance: &wfe_core::models::WorkflowInstance) {
|
||||||
|
output::step("Up workflow summary:");
|
||||||
|
for ep in &instance.execution_pointers {
|
||||||
|
let fallback = format!("step-{}", ep.step_id);
|
||||||
|
let name = ep.step_name.as_deref().unwrap_or(&fallback);
|
||||||
|
let status = format!("{:?}", ep.status);
|
||||||
|
let duration = match (ep.start_time, ep.end_time) {
|
||||||
|
(Some(start), Some(end)) => {
|
||||||
|
let d = end - start;
|
||||||
|
format!("{}ms", d.num_milliseconds())
|
||||||
|
}
|
||||||
|
_ => "-".to_string(),
|
||||||
|
};
|
||||||
|
output::ok(&format!(" {name:<40} {status:<12} {duration}"));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;

    // Registering every step plus the definition must not conflict or panic,
    // and the built definition stays structurally sane.
    #[tokio::test]
    async fn test_register_all_steps_and_definition() {
        let host = crate::workflows::host::create_test_host().await.unwrap();
        register(&host).await;

        let def = definition::build();
        assert!(def.steps.len() > 20);
        assert_eq!(def.id, "up");

        host.stop().await;
    }

    // A pointer without a step name must fall back to `step-<id>` rather
    // than panic.
    #[tokio::test]
    async fn test_print_summary_with_missing_step_names() {
        let mut instance =
            wfe_core::models::WorkflowInstance::new("test", 1, serde_json::json!({}));
        let mut ep = wfe_core::models::ExecutionPointer::new(0);
        ep.step_name = None;
        ep.status = wfe_core::models::PointerStatus::Complete;
        ep.start_time = Some(chrono::Utc::now());
        ep.end_time = Some(chrono::Utc::now());
        instance.execution_pointers.push(ep);
        print_summary(&instance);
    }

    // Missing timestamps must render as "-" instead of panicking.
    #[tokio::test]
    async fn test_print_summary_with_missing_times() {
        let mut instance =
            wfe_core::models::WorkflowInstance::new("test", 1, serde_json::json!({}));
        let mut ep = wfe_core::models::ExecutionPointer::new(0);
        ep.step_name = Some("test-step".to_string());
        ep.status = wfe_core::models::PointerStatus::Complete;
        ep.start_time = None;
        ep.end_time = None;
        instance.execution_pointers.push(ep);
        print_summary(&instance);
    }
}
|
||||||
227
src/workflows/up/steps/certificates.rs
Normal file
227
src/workflows/up/steps/certificates.rs
Normal file
@@ -0,0 +1,227 @@
|
|||||||
|
//! Certificate steps: TLS cert generation, TLS secret, cert-manager install.
|
||||||
|
|
||||||
|
use wfe_core::models::ExecutionResult;
|
||||||
|
use wfe_core::traits::{StepBody, StepExecutionContext};
|
||||||
|
|
||||||
|
use crate::kube as k;
|
||||||
|
use crate::output::{ok, step};
|
||||||
|
use crate::workflows::data::UpData;
|
||||||
|
|
||||||
|
fn secrets_dir() -> std::path::PathBuf {
|
||||||
|
crate::config::get_infra_dir()
|
||||||
|
.join("secrets")
|
||||||
|
.join("local")
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── EnsureTLSCert ───────────────────────────────────────────────────────────

/// Generate a self-signed wildcard TLS certificate if one doesn't exist.
///
/// Unit step (no state); the work happens in its `StepBody::run` impl, which
/// writes `tls.crt`/`tls.key` under [`secrets_dir`] on first run and is a
/// no-op when the cert file already exists.
#[derive(Default)]
pub struct EnsureTLSCert;
|
||||||
|
|
||||||
|
#[async_trait::async_trait]
|
||||||
|
impl StepBody for EnsureTLSCert {
|
||||||
|
async fn run(
|
||||||
|
&mut self,
|
||||||
|
ctx: &StepExecutionContext<'_>,
|
||||||
|
) -> wfe_core::Result<ExecutionResult> {
|
||||||
|
let data: UpData = serde_json::from_value(ctx.workflow.data.clone())
|
||||||
|
.map_err(|e| wfe_core::WfeError::StepExecution(e.to_string()))?;
|
||||||
|
|
||||||
|
let domain = resolve_domain(&data)?;
|
||||||
|
|
||||||
|
step("TLS certificate...");
|
||||||
|
|
||||||
|
let dir = secrets_dir();
|
||||||
|
let cert_path = dir.join("tls.crt");
|
||||||
|
let key_path = dir.join("tls.key");
|
||||||
|
|
||||||
|
if cert_path.exists() {
|
||||||
|
ok(&format!("Cert exists. Domain: {domain}"));
|
||||||
|
return Ok(ExecutionResult::next());
|
||||||
|
}
|
||||||
|
|
||||||
|
ok(&format!("Generating wildcard cert for *.{domain}..."));
|
||||||
|
std::fs::create_dir_all(&dir).map_err(|e| {
|
||||||
|
wfe_core::WfeError::StepExecution(format!(
|
||||||
|
"Failed to create secrets dir {}: {e}",
|
||||||
|
dir.display()
|
||||||
|
))
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let subject_alt_names = vec![format!("*.{domain}")];
|
||||||
|
let mut params = rcgen::CertificateParams::new(subject_alt_names)
|
||||||
|
.map_err(|e| {
|
||||||
|
wfe_core::WfeError::StepExecution(format!(
|
||||||
|
"Failed to create certificate params: {e}"
|
||||||
|
))
|
||||||
|
})?;
|
||||||
|
params
|
||||||
|
.distinguished_name
|
||||||
|
.push(rcgen::DnType::CommonName, format!("*.{domain}"));
|
||||||
|
|
||||||
|
let key_pair = rcgen::KeyPair::generate().map_err(|e| {
|
||||||
|
wfe_core::WfeError::StepExecution(format!("Failed to generate key pair: {e}"))
|
||||||
|
})?;
|
||||||
|
let cert = params.self_signed(&key_pair).map_err(|e| {
|
||||||
|
wfe_core::WfeError::StepExecution(format!(
|
||||||
|
"Failed to generate self-signed certificate: {e}"
|
||||||
|
))
|
||||||
|
})?;
|
||||||
|
|
||||||
|
std::fs::write(&cert_path, cert.pem()).map_err(|e| {
|
||||||
|
wfe_core::WfeError::StepExecution(format!(
|
||||||
|
"Failed to write {}: {e}",
|
||||||
|
cert_path.display()
|
||||||
|
))
|
||||||
|
})?;
|
||||||
|
std::fs::write(&key_path, key_pair.serialize_pem()).map_err(|e| {
|
||||||
|
wfe_core::WfeError::StepExecution(format!(
|
||||||
|
"Failed to write {}: {e}",
|
||||||
|
key_path.display()
|
||||||
|
))
|
||||||
|
})?;
|
||||||
|
|
||||||
|
#[cfg(unix)]
|
||||||
|
{
|
||||||
|
use std::os::unix::fs::PermissionsExt;
|
||||||
|
std::fs::set_permissions(&key_path, std::fs::Permissions::from_mode(0o600))
|
||||||
|
.map_err(|e| {
|
||||||
|
wfe_core::WfeError::StepExecution(format!(
|
||||||
|
"Failed to set key permissions: {e}"
|
||||||
|
))
|
||||||
|
})?;
|
||||||
|
}
|
||||||
|
|
||||||
|
ok(&format!("Cert generated. Domain: {domain}"));
|
||||||
|
Ok(ExecutionResult::next())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── EnsureTLSSecret ─────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
/// Apply the TLS secret to the ingress namespace.
///
/// Reads the PEM files written by `EnsureTLSCert` from the local secrets dir
/// and server-side-applies them as a `kubernetes.io/tls` Secret named
/// `pingora-tls` in the `ingress` namespace.
#[derive(Default)]
pub struct EnsureTLSSecret;

#[async_trait::async_trait]
impl StepBody for EnsureTLSSecret {
    async fn run(
        &mut self,
        ctx: &StepExecutionContext<'_>,
    ) -> wfe_core::Result<ExecutionResult> {
        // Deserialized only to fail fast on a malformed workflow payload;
        // no UpData fields are consumed by this step.
        let _data: UpData = serde_json::from_value(ctx.workflow.data.clone())
            .map_err(|e| wfe_core::WfeError::StepExecution(e.to_string()))?;

        step("TLS secret...");

        // The target namespace must exist before the Secret can be applied.
        k::ensure_ns("ingress")
            .await
            .map_err(|e| wfe_core::WfeError::StepExecution(e.to_string()))?;

        // Load PEM material produced by EnsureTLSCert; a failure here means
        // the cert step did not run or wrote somewhere else.
        let dir = secrets_dir();
        let cert_pem = std::fs::read_to_string(dir.join("tls.crt")).map_err(|e| {
            wfe_core::WfeError::StepExecution(format!("Failed to read tls.crt: {e}"))
        })?;
        let key_pem = std::fs::read_to_string(dir.join("tls.key")).map_err(|e| {
            wfe_core::WfeError::StepExecution(format!("Failed to read tls.key: {e}"))
        })?;

        let client = k::get_client()
            .await
            .map_err(|e| wfe_core::WfeError::StepExecution(e.to_string()))?;
        let api: kube::api::Api<k8s_openapi::api::core::v1::Secret> =
            kube::api::Api::namespaced(client.clone(), "ingress");

        // Secret `data` values must be base64-encoded (standard alphabet).
        let b64_cert = base64::Engine::encode(
            &base64::engine::general_purpose::STANDARD,
            cert_pem.as_bytes(),
        );
        let b64_key = base64::Engine::encode(
            &base64::engine::general_purpose::STANDARD,
            key_pem.as_bytes(),
        );

        let secret_obj = serde_json::json!({
            "apiVersion": "v1",
            "kind": "Secret",
            "metadata": {
                "name": "pingora-tls",
                "namespace": "ingress",
            },
            "type": "kubernetes.io/tls",
            "data": {
                "tls.crt": b64_cert,
                "tls.key": b64_key,
            },
        });

        // Server-side apply with force so reruns overwrite a stale secret
        // regardless of previous field ownership.
        let pp = kube::api::PatchParams::apply("sunbeam").force();
        api.patch("pingora-tls", &pp, &kube::api::Patch::Apply(secret_obj))
            .await
            .map_err(|e| {
                wfe_core::WfeError::StepExecution(format!("Failed to create TLS secret: {e}"))
            })?;

        ok("Done.");
        Ok(ExecutionResult::next())
    }
}
|
||||||
|
|
||||||
|
// ── Helpers ─────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
/// Resolve the effective domain: an explicit value in workflow data wins,
/// then the embedded context; error when neither carries a non-empty domain.
fn resolve_domain(data: &UpData) -> wfe_core::Result<String> {
    if !data.domain.is_empty() {
        return Ok(data.domain.clone());
    }

    let ctx_domain = data
        .ctx
        .as_ref()
        .map(|c| c.domain.clone())
        .filter(|d| !d.is_empty());

    match ctx_domain {
        Some(d) => Ok(d),
        None => Err(wfe_core::WfeError::StepExecution(
            "domain not resolved".into(),
        )),
    }
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;
    use crate::cluster::CERT_MANAGER_URL;

    /// secrets_dir must resolve under the infra dir's secrets/local folder.
    #[test]
    fn secrets_dir_ends_with_secrets_local() {
        let dir = secrets_dir();
        let ends_ok = dir.ends_with("secrets/local");
        assert!(
            ends_ok,
            "secrets_dir() should end with secrets/local, got: {}",
            dir.display()
        );
    }

    /// The cert-manager manifest URL must be a GitHub release YAML asset.
    #[test]
    fn cert_manager_url_points_to_github_release() {
        let url = CERT_MANAGER_URL;
        assert!(url.starts_with("https://github.com/cert-manager/cert-manager/"));
        assert!(url.contains("/releases/download/"));
        assert!(url.ends_with(".yaml"));
    }

    /// The URL must pin a v1.x release.
    #[test]
    fn cert_manager_url_has_version() {
        let has_v1 = CERT_MANAGER_URL.contains("/v1.");
        assert!(has_v1, "CERT_MANAGER_URL should reference a v1.x release");
    }

    /// Both step structs must be constructible via Default.
    #[test]
    fn ensure_tls_cert_is_default() {
        let _step = EnsureTLSCert::default();
    }

    #[test]
    fn ensure_tls_secret_is_default() {
        let _step = EnsureTLSSecret::default();
    }
}
|
||||||
4
src/workflows/up/steps/database.rs
Normal file
4
src/workflows/up/steps/database.rs
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
//! Database steps unique to up.
|
||||||
|
//!
|
||||||
|
//! WaitForPostgres and ConfigureDatabaseEngine are shared from seed::steps::postgres.
|
||||||
|
//! ApplyVSO is now handled by the ApplyManifest primitive.
|
||||||
90
src/workflows/up/steps/finalize.rs
Normal file
90
src/workflows/up/steps/finalize.rs
Normal file
@@ -0,0 +1,90 @@
|
|||||||
|
//! Finalize steps: print URLs.
|
||||||
|
|
||||||
|
use wfe_core::models::ExecutionResult;
|
||||||
|
use wfe_core::traits::{StepBody, StepExecutionContext};
|
||||||
|
|
||||||
|
use crate::constants::GITEA_ADMIN_USER;
|
||||||
|
use crate::workflows::data::UpData;
|
||||||
|
|
||||||
|
fn resolve_domain(data: &UpData) -> String {
|
||||||
|
if !data.domain.is_empty() {
|
||||||
|
return data.domain.clone();
|
||||||
|
}
|
||||||
|
if let Some(ctx) = &data.ctx {
|
||||||
|
if !ctx.domain.is_empty() {
|
||||||
|
return ctx.domain.clone();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
String::new()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Print service URLs.
#[derive(Default)]
pub struct PrintURLs;

#[async_trait::async_trait]
impl StepBody for PrintURLs {
    /// Final up-workflow step: prints per-service HTTPS URLs for the resolved
    /// domain plus OpenBao port-forward instructions.
    async fn run(
        &mut self,
        ctx: &StepExecutionContext<'_>,
    ) -> wfe_core::Result<ExecutionResult> {
        let data: UpData = serde_json::from_value(ctx.workflow.data.clone())
            .map_err(|e| wfe_core::WfeError::StepExecution(e.to_string()))?;

        // Best-effort: may be empty if neither data nor ctx carried a domain.
        let domain = resolve_domain(&data);

        // Horizontal rule built from U+2500 (box-drawing light horizontal).
        let sep = "\u{2500}".repeat(60);
        println!("\n{sep}");
        println!(" Stack is up. Domain: {domain}");
        println!("{sep}");

        let urls: &[(&str, String)] = &[
            ("Auth", format!("https://auth.{domain}/")),
            ("Docs", format!("https://docs.{domain}/")),
            ("Meet", format!("https://meet.{domain}/")),
            ("Drive", format!("https://drive.{domain}/")),
            ("Chat", format!("https://chat.{domain}/")),
            ("Mail", format!("https://mail.{domain}/")),
            ("People", format!("https://people.{domain}/")),
            (
                "Gitea",
                format!(
                    "https://src.{domain}/ ({GITEA_ADMIN_USER} / <from openbao>)"
                ),
            ),
        ];

        for (name, url) in urls {
            println!(" {name:<10} {url}");
        }

        // Manual instructions for reaching the OpenBao UI and its root token.
        println!();
        println!(" OpenBao UI:");
        println!(" kubectl --context=sunbeam -n data port-forward svc/openbao 8200:8200");
        println!(" http://localhost:8200");
        println!(
            " token: kubectl --context=sunbeam -n data get secret openbao-keys \
             -o jsonpath='{{.data.root-token}}' | base64 -d"
        );
        println!("{sep}\n");

        Ok(ExecutionResult::next())
    }
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;

    /// The step struct must be constructible via Default.
    #[test]
    fn print_urls_is_default() {
        let _step = PrintURLs::default();
    }

    /// An explicit domain in workflow data takes precedence.
    #[test]
    fn resolve_domain_with_domain_set() {
        let mut data = UpData::default();
        data.domain = "sunbeam.pt".into();
        assert_eq!(resolve_domain(&data), "sunbeam.pt");
    }
}
|
||||||
117
src/workflows/up/steps/infrastructure.rs
Normal file
117
src/workflows/up/steps/infrastructure.rs
Normal file
@@ -0,0 +1,117 @@
|
|||||||
|
//! Infrastructure steps: Cilium check, buildkit check.
|
||||||
|
|
||||||
|
use wfe_core::models::ExecutionResult;
|
||||||
|
use wfe_core::traits::{StepBody, StepExecutionContext};
|
||||||
|
|
||||||
|
use crate::kube as k;
|
||||||
|
use crate::output::{ok, step, warn};
|
||||||
|
use crate::workflows::data::UpData;
|
||||||
|
|
||||||
|
// ── EnsureCilium ────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
/// Verify Cilium CNI pods are running in kube-system. Warn if missing, don't fail.
#[derive(Default)]
pub struct EnsureCilium;

#[async_trait::async_trait]
impl StepBody for EnsureCilium {
    /// Initializes the kube context from the injected `__ctx`, checks for
    /// Cilium pods (warning only on absence), and — when no domain was
    /// supplied — resolves one and merges it into the workflow data.
    async fn run(
        &mut self,
        ctx: &StepExecutionContext<'_>,
    ) -> wfe_core::Result<ExecutionResult> {
        let data: UpData = serde_json::from_value(ctx.workflow.data.clone())
            .map_err(|e| wfe_core::WfeError::StepExecution(e.to_string()))?;

        // The dispatcher injects the CLI context under `__ctx`; it is required
        // here to configure kube access.
        let step_ctx = data
            .ctx
            .as_ref()
            .ok_or_else(|| wfe_core::WfeError::StepExecution("missing __ctx".into()))?;

        // Initialize kube context for the rest of the workflow
        k::set_context(&step_ctx.kube_context, &step_ctx.ssh_host);

        step("Cilium CNI...");

        // NOTE(review): `client` is passed to check_cilium_pods (which takes
        // &kube::Client) twice below, so k::get_client presumably yields a
        // reference — confirm against its signature.
        let client = k::get_client()
            .await
            .map_err(|e| wfe_core::WfeError::StepExecution(e.to_string()))?;

        // Cilium may live in either namespace depending on how it was installed.
        let found = check_cilium_pods(client, "kube-system").await
            || check_cilium_pods(client, "cilium-system").await;

        if found {
            ok("Cilium is healthy.");
        } else {
            // Deliberately non-fatal: CNI is owned by the infra layer.
            warn("Cilium pods not found. CNI should be installed at the infrastructure level.");
            warn("Continuing anyway -- networking may not work correctly.");
        }

        // Resolve domain if empty and store in workflow data
        let mut result = ExecutionResult::next();
        if data.domain.is_empty() {
            let domain = k::get_domain()
                .await
                .map_err(|e| wfe_core::WfeError::StepExecution(e.to_string()))?;
            result.output_data = Some(serde_json::json!({ "domain": domain }));
        }

        Ok(result)
    }
}
|
||||||
|
|
||||||
|
async fn check_cilium_pods(client: &kube::Client, ns: &str) -> bool {
|
||||||
|
let pods: kube::Api<k8s_openapi::api::core::v1::Pod> =
|
||||||
|
kube::Api::namespaced(client.clone(), ns);
|
||||||
|
let lp = kube::api::ListParams::default().labels("k8s-app=cilium");
|
||||||
|
match pods.list(&lp).await {
|
||||||
|
Ok(list) => !list.items.is_empty(),
|
||||||
|
Err(_) => false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── EnsureBuildKit ──────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
/// Check buildkit pods, warn if not present.
|
||||||
|
#[derive(Default)]
|
||||||
|
pub struct EnsureBuildKit;
|
||||||
|
|
||||||
|
#[async_trait::async_trait]
|
||||||
|
impl StepBody for EnsureBuildKit {
|
||||||
|
async fn run(
|
||||||
|
&mut self,
|
||||||
|
ctx: &StepExecutionContext<'_>,
|
||||||
|
) -> wfe_core::Result<ExecutionResult> {
|
||||||
|
let _data: UpData = serde_json::from_value(ctx.workflow.data.clone())
|
||||||
|
.map_err(|e| wfe_core::WfeError::StepExecution(e.to_string()))?;
|
||||||
|
|
||||||
|
step("BuildKit...");
|
||||||
|
|
||||||
|
let client = k::get_client()
|
||||||
|
.await
|
||||||
|
.map_err(|e| wfe_core::WfeError::StepExecution(e.to_string()))?;
|
||||||
|
let pods: kube::Api<k8s_openapi::api::core::v1::Pod> =
|
||||||
|
kube::Api::namespaced(client.clone(), "buildkit");
|
||||||
|
let lp = kube::api::ListParams::default();
|
||||||
|
match pods.list(&lp).await {
|
||||||
|
Ok(list) if !list.items.is_empty() => ok("BuildKit is present."),
|
||||||
|
_ => warn("BuildKit pods not found -- image builds may not work."),
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(ExecutionResult::next())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;

    /// Both step structs must be constructible via Default.
    #[test]
    fn ensure_cilium_is_default() {
        let _step = EnsureCilium::default();
    }

    #[test]
    fn ensure_buildkit_is_default() {
        let _step = EnsureBuildKit::default();
    }
}
|
||||||
22
src/workflows/up/steps/mod.rs
Normal file
22
src/workflows/up/steps/mod.rs
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
//! Up workflow steps — each module contains one or more WFE step structs.
|
||||||
|
|
||||||
|
pub mod certificates;
|
||||||
|
pub mod database;
|
||||||
|
pub mod finalize;
|
||||||
|
pub mod infrastructure;
|
||||||
|
pub mod platform;
|
||||||
|
pub mod vault;
|
||||||
|
|
||||||
|
// Steps unique to the up workflow
|
||||||
|
pub use certificates::{EnsureTLSCert, EnsureTLSSecret};
|
||||||
|
pub use finalize::PrintURLs;
|
||||||
|
pub use infrastructure::{EnsureBuildKit, EnsureCilium};
|
||||||
|
pub use platform::BootstrapGitea;
|
||||||
|
|
||||||
|
// Steps shared from seed workflow (data-struct-agnostic, reusable)
|
||||||
|
pub use crate::workflows::seed::steps::{
|
||||||
|
FindOpenBaoPod, WaitPodRunning, InitOrUnsealOpenBao,
|
||||||
|
WaitForPostgres,
|
||||||
|
ConfigureDatabaseEngine,
|
||||||
|
SyncGiteaAdminPassword,
|
||||||
|
};
|
||||||
31
src/workflows/up/steps/platform.rs
Normal file
31
src/workflows/up/steps/platform.rs
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
//! Platform steps: Gitea bootstrap.
|
||||||
|
|
||||||
|
use wfe_core::models::ExecutionResult;
|
||||||
|
use wfe_core::traits::{StepBody, StepExecutionContext};
|
||||||
|
|
||||||
|
use crate::output::step;
|
||||||
|
|
||||||
|
/// Run Gitea bootstrap (repos, webhooks, etc.).
|
||||||
|
#[derive(Default)]
|
||||||
|
pub struct BootstrapGitea;
|
||||||
|
|
||||||
|
#[async_trait::async_trait]
|
||||||
|
impl StepBody for BootstrapGitea {
|
||||||
|
async fn run(
|
||||||
|
&mut self,
|
||||||
|
_ctx: &StepExecutionContext<'_>,
|
||||||
|
) -> wfe_core::Result<ExecutionResult> {
|
||||||
|
step("Gitea bootstrap...");
|
||||||
|
crate::gitea::cmd_bootstrap().await
|
||||||
|
.map_err(|e| wfe_core::WfeError::StepExecution(e.to_string()))?;
|
||||||
|
Ok(ExecutionResult::next())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;

    /// The step struct must be constructible via Default.
    #[test]
    fn bootstrap_gitea_is_default() {
        let _step = BootstrapGitea::default();
    }
}
|
||||||
5
src/workflows/up/steps/vault.rs
Normal file
5
src/workflows/up/steps/vault.rs
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
//! Vault steps unique to up workflow.
|
||||||
|
//!
|
||||||
|
//! OpenBao init/unseal, KV seeding, K8s auth, and KV writing are all shared
|
||||||
|
//! from seed::steps (openbao_init, kv_seeding). This file is kept for the
|
||||||
|
//! module structure but currently has no unique steps.
|
||||||
79
src/workflows/verify/definition.rs
Normal file
79
src/workflows/verify/definition.rs
Normal file
@@ -0,0 +1,79 @@
|
|||||||
|
//! Verify workflow definition — VSO ↔ OpenBao E2E verification.
|
||||||
|
|
||||||
|
use wfe_core::builder::WorkflowBuilder;
|
||||||
|
use wfe_core::models::WorkflowDefinition;
|
||||||
|
|
||||||
|
use super::steps;
|
||||||
|
|
||||||
|
/// Build the verify workflow definition.
///
/// Steps execute sequentially:
/// 1. Find OpenBao pod
/// 2. Get root token from K8s secret
/// 3. Write sentinel value to OpenBao
/// 4. Apply VaultAuth CRD
/// 5. Apply VaultStaticSecret CRD
/// 6. Wait for VSO to sync
/// 7. Check K8s Secret value matches sentinel
/// 8. Clean up test resources
/// 9. Print result
pub fn build() -> WorkflowDefinition {
    // Ordering is load-bearing: later steps read outputs written by earlier
    // ones through shared workflow data (ob_pod, root_token, test_value).
    WorkflowBuilder::<serde_json::Value>::new()
        .start_with::<steps::FindOpenBaoPod>()
        .name("find-openbao-pod")
        .then::<steps::GetRootToken>()
        .name("get-root-token")
        .then::<steps::WriteSentinel>()
        .name("write-sentinel")
        .then::<steps::ApplyVaultAuth>()
        .name("apply-vault-auth")
        .then::<steps::ApplyVaultStaticSecret>()
        .name("apply-vault-static-secret")
        .then::<steps::WaitForSync>()
        .name("wait-for-sync")
        .then::<steps::CheckSecretValue>()
        .name("check-secret-value")
        .then::<steps::Cleanup>()
        .name("cleanup")
        .then::<steps::PrintResult>()
        .name("print-result")
        .end_workflow()
        .build("verify", 1)
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;

    /// The built definition must carry the expected id, version, and step count.
    #[test]
    fn test_build_returns_valid_definition() {
        let def = build();
        assert_eq!(def.id, "verify");
        assert_eq!(def.version, 1);
        assert_eq!(def.steps.len(), 9);
    }

    /// Step names must appear in execution order.
    #[test]
    fn test_build_step_names() {
        let expected = [
            "find-openbao-pod",
            "get-root-token",
            "write-sentinel",
            "apply-vault-auth",
            "apply-vault-static-secret",
            "wait-for-sync",
            "check-secret-value",
            "cleanup",
            "print-result",
        ];
        let def = build();
        assert_eq!(def.steps.len(), expected.len());
        for (step, want) in def.steps.iter().zip(expected) {
            assert_eq!(step.name.as_deref(), Some(want));
        }
    }
}
|
||||||
39
src/workflows/verify/mod.rs
Normal file
39
src/workflows/verify/mod.rs
Normal file
@@ -0,0 +1,39 @@
|
|||||||
|
//! Verify workflow — VSO ↔ OpenBao end-to-end verification.
|
||||||
|
|
||||||
|
pub mod definition;
|
||||||
|
pub mod steps;
|
||||||
|
|
||||||
|
use crate::output;
|
||||||
|
|
||||||
|
/// Register all verify workflow steps and the workflow definition with a host.
///
/// Every step type referenced by `definition::build()` is registered first,
/// then the definition itself.
pub async fn register(host: &wfe::WorkflowHost) {
    // One registration per step type used by the verify definition.
    host.register_step::<steps::FindOpenBaoPod>().await;
    host.register_step::<steps::GetRootToken>().await;
    host.register_step::<steps::WriteSentinel>().await;
    host.register_step::<steps::ApplyVaultAuth>().await;
    host.register_step::<steps::ApplyVaultStaticSecret>().await;
    host.register_step::<steps::WaitForSync>().await;
    host.register_step::<steps::CheckSecretValue>().await;
    host.register_step::<steps::Cleanup>().await;
    host.register_step::<steps::PrintResult>().await;

    host.register_workflow_definition(definition::build()).await;
}
|
||||||
|
|
||||||
|
/// Print a summary of the completed verify workflow.
|
||||||
|
pub fn print_summary(instance: &wfe_core::models::WorkflowInstance) {
|
||||||
|
output::step("Verify workflow summary:");
|
||||||
|
for ep in &instance.execution_pointers {
|
||||||
|
let fallback = format!("step-{}", ep.step_id);
|
||||||
|
let name = ep.step_name.as_deref().unwrap_or(&fallback);
|
||||||
|
let status = format!("{:?}", ep.status);
|
||||||
|
let duration = match (ep.start_time, ep.end_time) {
|
||||||
|
(Some(start), Some(end)) => {
|
||||||
|
let d = end - start;
|
||||||
|
format!("{}ms", d.num_milliseconds())
|
||||||
|
}
|
||||||
|
_ => "-".to_string(),
|
||||||
|
};
|
||||||
|
output::ok(&format!(" {name:<40} {status:<12} {duration}"));
|
||||||
|
}
|
||||||
|
}
|
||||||
15
src/workflows/verify/steps/mod.rs
Normal file
15
src/workflows/verify/steps/mod.rs
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
//! Verify workflow steps — VSO ↔ OpenBao end-to-end verification.
|
||||||
|
|
||||||
|
mod verify;
|
||||||
|
|
||||||
|
pub use verify::{
|
||||||
|
FindOpenBaoPod,
|
||||||
|
GetRootToken,
|
||||||
|
WriteSentinel,
|
||||||
|
ApplyVaultAuth,
|
||||||
|
ApplyVaultStaticSecret,
|
||||||
|
WaitForSync,
|
||||||
|
CheckSecretValue,
|
||||||
|
Cleanup,
|
||||||
|
PrintResult,
|
||||||
|
};
|
||||||
411
src/workflows/verify/steps/verify.rs
Normal file
411
src/workflows/verify/steps/verify.rs
Normal file
@@ -0,0 +1,411 @@
|
|||||||
|
//! Steps for the verify workflow — VSO ↔ OpenBao E2E verification.
|
||||||
|
|
||||||
|
use wfe_core::models::ExecutionResult;
|
||||||
|
use wfe_core::traits::{StepBody, StepExecutionContext};
|
||||||
|
|
||||||
|
use crate::kube as k;
|
||||||
|
use crate::openbao::BaoClient;
|
||||||
|
use crate::output::{ok, warn};
|
||||||
|
use crate::secrets;
|
||||||
|
use crate::workflows::data::VerifyData;
|
||||||
|
|
||||||
|
const TEST_NS: &str = "ory";
|
||||||
|
const TEST_NAME: &str = "vso-verify";
|
||||||
|
|
||||||
|
fn load_data(ctx: &StepExecutionContext<'_>) -> wfe_core::Result<VerifyData> {
|
||||||
|
serde_json::from_value(ctx.workflow.data.clone())
|
||||||
|
.map_err(|e| wfe_core::WfeError::StepExecution(e.to_string()))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Shorthand for building a `StepExecution` error from any message type.
fn step_err(msg: impl Into<String>) -> wfe_core::WfeError {
    let text: String = msg.into();
    wfe_core::WfeError::StepExecution(text)
}
|
||||||
|
|
||||||
|
// ── FindOpenBaoPod ─────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
/// Find the OpenBao server pod by label selector.
#[derive(Default)]
pub struct FindOpenBaoPod;

#[async_trait::async_trait]
impl StepBody for FindOpenBaoPod {
    /// Locates the OpenBao server pod in the `data` namespace and records its
    /// name as `ob_pod` in the workflow data for later steps.
    async fn run(
        &mut self,
        ctx: &StepExecutionContext<'_>,
    ) -> wfe_core::Result<ExecutionResult> {
        let data = load_data(ctx)?;
        // The dispatcher injects the CLI context under `__ctx`; it is required
        // to configure kube access for the whole workflow.
        let step_ctx = data.ctx.as_ref()
            .ok_or_else(|| step_err("missing __ctx in workflow data"))?;

        k::set_context(&step_ctx.kube_context, &step_ctx.ssh_host);

        let client = k::get_client().await.map_err(|e| step_err(e.to_string()))?;
        let pods: kube::Api<k8s_openapi::api::core::v1::Pod> =
            kube::Api::namespaced(client.clone(), "data");
        let lp = kube::api::ListParams::default()
            .labels("app.kubernetes.io/name=openbao,component=server");
        let pod_list = pods.list(&lp).await.map_err(|e| step_err(e.to_string()))?;

        // Any matching server pod will do; take the first.
        let ob_pod = pod_list
            .items
            .first()
            .and_then(|p| p.metadata.name.as_deref())
            .ok_or_else(|| step_err("OpenBao pod not found -- run full bring-up first"))?;

        ok(&format!("OpenBao pod: {ob_pod}"));

        // Merge the pod name into the shared workflow data.
        let mut result = ExecutionResult::next();
        result.output_data = Some(serde_json::json!({ "ob_pod": ob_pod }));
        Ok(result)
    }
}
|
||||||
|
|
||||||
|
// ── GetRootToken ───────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
/// Read the root token from the openbao-keys K8s secret.
|
||||||
|
#[derive(Default)]
|
||||||
|
pub struct GetRootToken;
|
||||||
|
|
||||||
|
#[async_trait::async_trait]
|
||||||
|
impl StepBody for GetRootToken {
|
||||||
|
async fn run(
|
||||||
|
&mut self,
|
||||||
|
_ctx: &StepExecutionContext<'_>,
|
||||||
|
) -> wfe_core::Result<ExecutionResult> {
|
||||||
|
let root_token = k::kube_get_secret_field("data", "openbao-keys", "root-token")
|
||||||
|
.await
|
||||||
|
.map_err(|e| step_err(format!("Could not read openbao-keys secret: {e}")))?;
|
||||||
|
|
||||||
|
ok("Root token retrieved.");
|
||||||
|
|
||||||
|
let mut result = ExecutionResult::next();
|
||||||
|
result.output_data = Some(serde_json::json!({ "root_token": root_token }));
|
||||||
|
Ok(result)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── WriteSentinel ──────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
/// Write a random test sentinel value to OpenBao secret/vso-test.
#[derive(Default)]
pub struct WriteSentinel;

#[async_trait::async_trait]
impl StepBody for WriteSentinel {
    /// Port-forwards to the OpenBao pod, writes a random sentinel under
    /// `secret/vso-test`, and records it as `test_value` for the later
    /// CheckSecretValue step.
    async fn run(
        &mut self,
        ctx: &StepExecutionContext<'_>,
    ) -> wfe_core::Result<ExecutionResult> {
        let data = load_data(ctx)?;
        // Produced by the earlier FindOpenBaoPod / GetRootToken steps.
        let ob_pod = data.ob_pod.as_deref()
            .ok_or_else(|| step_err("ob_pod not set"))?;
        let root_token = data.root_token.as_deref()
            .ok_or_else(|| step_err("root_token not set"))?;

        // `pf` must stay in scope for the duration of the Bao calls — dropping
        // it presumably tears down the port-forward (confirm in secrets module).
        let pf = secrets::port_forward("data", ob_pod, 8200).await
            .map_err(|e| step_err(e.to_string()))?;
        let bao = BaoClient::with_token(
            &format!("http://127.0.0.1:{}", pf.local_port),
            root_token,
        );

        let test_value = secrets::rand_token_n(16);
        ok("Writing test sentinel to OpenBao secret/vso-test...");

        let mut kv_data = std::collections::HashMap::new();
        kv_data.insert("test-key".to_string(), test_value.clone());
        bao.kv_put("secret", "vso-test", &kv_data).await
            .map_err(|e| step_err(e.to_string()))?;

        // Expose the sentinel so CheckSecretValue can compare against it.
        let mut result = ExecutionResult::next();
        result.output_data = Some(serde_json::json!({ "test_value": test_value }));
        Ok(result)
    }
}
|
||||||
|
|
||||||
|
// ── ApplyVaultAuth ─────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
/// Create the VaultAuth CRD for the test.
#[derive(Default)]
pub struct ApplyVaultAuth;

#[async_trait::async_trait]
impl StepBody for ApplyVaultAuth {
    /// Applies a VaultAuth resource using the `kubernetes` auth mount and the
    /// `vso` role, authenticating as the namespace's default ServiceAccount.
    async fn run(
        &mut self,
        _ctx: &StepExecutionContext<'_>,
    ) -> wfe_core::Result<ExecutionResult> {
        ok(&format!("Creating VaultAuth {TEST_NS}/{TEST_NAME}..."));
        // Raw string is a format! template: {TEST_NAME}/{TEST_NS} are
        // interpolated before the manifest is applied.
        k::kube_apply(&format!(
            r#"
apiVersion: secrets.hashicorp.com/v1beta1
kind: VaultAuth
metadata:
  name: {TEST_NAME}
  namespace: {TEST_NS}
spec:
  method: kubernetes
  mount: kubernetes
  kubernetes:
    role: vso
    serviceAccount: default
"#
        ))
        .await
        .map_err(|e| step_err(e.to_string()))?;

        Ok(ExecutionResult::next())
    }
}
|
||||||
|
|
||||||
|
// ── ApplyVaultStaticSecret ─────────────────────────────────────────────────
|
||||||
|
|
||||||
|
/// Create the VaultStaticSecret CRD that VSO will sync.
#[derive(Default)]
pub struct ApplyVaultStaticSecret;

#[async_trait::async_trait]
impl StepBody for ApplyVaultStaticSecret {
    /// Applies a VaultStaticSecret pointing at `secret/vso-test` (kv-v2) with
    /// a 10s refresh, destined for a K8s Secret VSO creates/overwrites.
    async fn run(
        &mut self,
        _ctx: &StepExecutionContext<'_>,
    ) -> wfe_core::Result<ExecutionResult> {
        ok(&format!("Creating VaultStaticSecret {TEST_NS}/{TEST_NAME}..."));
        // References the VaultAuth created by the previous step via
        // vaultAuthRef; destination create/overwrite make the step rerunnable.
        k::kube_apply(&format!(
            r#"
apiVersion: secrets.hashicorp.com/v1beta1
kind: VaultStaticSecret
metadata:
  name: {TEST_NAME}
  namespace: {TEST_NS}
spec:
  vaultAuthRef: {TEST_NAME}
  mount: secret
  type: kv-v2
  path: vso-test
  refreshAfter: 10s
  destination:
    name: {TEST_NAME}
    create: true
    overwrite: true
"#
        ))
        .await
        .map_err(|e| step_err(e.to_string()))?;

        Ok(ExecutionResult::next())
    }
}
|
||||||
|
|
||||||
|
// ── WaitForSync ────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
/// Wait for VSO to sync the secret (up to 60s).
#[derive(Default)]
pub struct WaitForSync;

#[async_trait::async_trait]
impl StepBody for WaitForSync {
    /// Polls the VaultStaticSecret's `.status.secretMAC` every 3s until the
    /// deadline; a non-empty MAC indicates VSO has written the destination
    /// Secret at least once.
    async fn run(
        &mut self,
        _ctx: &StepExecutionContext<'_>,
    ) -> wfe_core::Result<ExecutionResult> {
        ok("Waiting for VSO to sync (up to 60s)...");

        let deadline = tokio::time::Instant::now() + std::time::Duration::from_secs(60);
        let mut synced = false;

        while tokio::time::Instant::now() < deadline {
            // kubectl can print "<none>" for an absent jsonpath match; treat
            // that the same as empty output.
            let (code, mac) = kubectl_jsonpath(
                TEST_NS,
                "vaultstaticsecret",
                TEST_NAME,
                "{.status.secretMAC}",
            )
            .await;
            if code == 0 && !mac.is_empty() && mac != "<none>" {
                synced = true;
                break;
            }
            tokio::time::sleep(std::time::Duration::from_secs(3)).await;
        }

        if !synced {
            // Surface the CRD's own condition message to aid debugging.
            let (_, msg) = kubectl_jsonpath(
                TEST_NS,
                "vaultstaticsecret",
                TEST_NAME,
                "{.status.conditions[0].message}",
            )
            .await;
            return Err(step_err(format!(
                "VSO did not sync within 60s. Last status: {}",
                if msg.is_empty() { "unknown".to_string() } else { msg }
            )));
        }

        let mut result = ExecutionResult::next();
        result.output_data = Some(serde_json::json!({ "synced": true }));
        Ok(result)
    }
}
|
||||||
|
|
||||||
|
// ── CheckSecretValue ───────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
/// Verify the K8s Secret contains the expected sentinel value.
|
||||||
|
#[derive(Default)]
|
||||||
|
pub struct CheckSecretValue;
|
||||||
|
|
||||||
|
#[async_trait::async_trait]
|
||||||
|
impl StepBody for CheckSecretValue {
|
||||||
|
async fn run(
|
||||||
|
&mut self,
|
||||||
|
ctx: &StepExecutionContext<'_>,
|
||||||
|
) -> wfe_core::Result<ExecutionResult> {
|
||||||
|
let data = load_data(ctx)?;
|
||||||
|
let test_value = data.test_value.as_deref()
|
||||||
|
.ok_or_else(|| step_err("test_value not set"))?;
|
||||||
|
|
||||||
|
ok("Verifying K8s Secret contents...");
|
||||||
|
|
||||||
|
let secret = k::kube_get_secret(TEST_NS, TEST_NAME)
|
||||||
|
.await
|
||||||
|
.map_err(|e| step_err(e.to_string()))?
|
||||||
|
.ok_or_else(|| step_err(format!("K8s Secret {TEST_NS}/{TEST_NAME} not found")))?;
|
||||||
|
|
||||||
|
let secret_data = secret.data.as_ref()
|
||||||
|
.ok_or_else(|| step_err("Secret has no data"))?;
|
||||||
|
let raw = secret_data.get("test-key")
|
||||||
|
.ok_or_else(|| step_err("Missing key 'test-key' in secret"))?;
|
||||||
|
let actual = String::from_utf8(raw.0.clone())
|
||||||
|
.map_err(|e| step_err(format!("UTF-8 error: {e}")))?;
|
||||||
|
|
||||||
|
if actual != test_value {
|
||||||
|
return Err(step_err(format!(
|
||||||
|
"Value mismatch!\n expected: {:?}\n got: {:?}",
|
||||||
|
test_value, actual
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
|
||||||
|
ok("Sentinel value matches -- VSO -> OpenBao integration is working.");
|
||||||
|
Ok(ExecutionResult::next())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── Cleanup ────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
/// Clean up all test resources (always runs).
|
||||||
|
#[derive(Default)]
|
||||||
|
pub struct Cleanup;
|
||||||
|
|
||||||
|
#[async_trait::async_trait]
|
||||||
|
impl StepBody for Cleanup {
|
||||||
|
async fn run(
|
||||||
|
&mut self,
|
||||||
|
ctx: &StepExecutionContext<'_>,
|
||||||
|
) -> wfe_core::Result<ExecutionResult> {
|
||||||
|
ok("Cleaning up test resources...");
|
||||||
|
|
||||||
|
let _ = secrets::delete_resource(TEST_NS, "vaultstaticsecret", TEST_NAME).await;
|
||||||
|
let _ = secrets::delete_resource(TEST_NS, "vaultauth", TEST_NAME).await;
|
||||||
|
|
||||||
|
// Delete the K8s Secret
|
||||||
|
if let Ok(client) = k::get_client().await {
|
||||||
|
let api: kube::Api<k8s_openapi::api::core::v1::Secret> =
|
||||||
|
kube::Api::namespaced(client.clone(), TEST_NS);
|
||||||
|
let _ = api.delete(TEST_NAME, &kube::api::DeleteParams::default()).await;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Delete the vault KV entry
|
||||||
|
let data = load_data(ctx)?;
|
||||||
|
if let (Some(ob_pod), Some(root_token)) = (data.ob_pod.as_deref(), data.root_token.as_deref()) {
|
||||||
|
if let Ok(pf) = secrets::port_forward("data", ob_pod, 8200).await {
|
||||||
|
let bao = BaoClient::with_token(
|
||||||
|
&format!("http://127.0.0.1:{}", pf.local_port),
|
||||||
|
root_token,
|
||||||
|
);
|
||||||
|
let _ = bao.kv_delete("secret", "vso-test").await;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(ExecutionResult::next())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── PrintResult ────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
/// Print final verification result.
|
||||||
|
#[derive(Default)]
|
||||||
|
pub struct PrintResult;
|
||||||
|
|
||||||
|
#[async_trait::async_trait]
|
||||||
|
impl StepBody for PrintResult {
|
||||||
|
async fn run(
|
||||||
|
&mut self,
|
||||||
|
ctx: &StepExecutionContext<'_>,
|
||||||
|
) -> wfe_core::Result<ExecutionResult> {
|
||||||
|
let data = load_data(ctx)?;
|
||||||
|
if data.synced {
|
||||||
|
ok("VSO E2E verification passed.");
|
||||||
|
} else {
|
||||||
|
warn("VSO verification did not complete successfully.");
|
||||||
|
}
|
||||||
|
Ok(ExecutionResult::next())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ── Helpers ────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
async fn kubectl_jsonpath(ns: &str, kind: &str, name: &str, jsonpath: &str) -> (i32, String) {
|
||||||
|
let ctx = format!("--context={}", k::context());
|
||||||
|
let jp = format!("-o=jsonpath={jsonpath}");
|
||||||
|
match tokio::process::Command::new("kubectl")
|
||||||
|
.args([&ctx, "-n", ns, "get", kind, name, &jp, "--ignore-not-found"])
|
||||||
|
.output()
|
||||||
|
.await
|
||||||
|
{
|
||||||
|
Ok(output) => {
|
||||||
|
let code = output.status.code().unwrap_or(1);
|
||||||
|
let stdout = String::from_utf8_lossy(&output.stdout).trim().to_string();
|
||||||
|
(code, stdout)
|
||||||
|
}
|
||||||
|
Err(_) => (1, String::new()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;

    // Smoke tests: each workflow step type is zero-sized and must be
    // constructible via `Default` (the `#[derive(Default)]` on each struct).

    #[test]
    fn find_openbao_pod_is_default() { let _ = FindOpenBaoPod::default(); }

    #[test]
    fn get_root_token_is_default() { let _ = GetRootToken::default(); }

    #[test]
    fn write_sentinel_is_default() { let _ = WriteSentinel::default(); }

    #[test]
    fn apply_vault_auth_is_default() { let _ = ApplyVaultAuth::default(); }

    #[test]
    fn apply_vault_static_secret_is_default() { let _ = ApplyVaultStaticSecret::default(); }

    #[test]
    fn wait_for_sync_is_default() { let _ = WaitForSync::default(); }

    #[test]
    fn check_secret_value_is_default() { let _ = CheckSecretValue::default(); }

    #[test]
    fn cleanup_is_default() { let _ = Cleanup::default(); }

    #[test]
    fn print_result_is_default() { let _ = PrintResult::default(); }

    // Pin the namespace/name shared by every VSO verification resource so an
    // accidental change is caught at test time.
    #[test]
    fn test_constants() {
        assert_eq!(TEST_NS, "ory");
        assert_eq!(TEST_NAME, "vso-verify");
    }
}
|
||||||
@@ -39,6 +39,8 @@ const PG_USERS: &[&str] = &[
|
|||||||
"find",
|
"find",
|
||||||
"calendars",
|
"calendars",
|
||||||
"projects",
|
"projects",
|
||||||
|
"penpot",
|
||||||
|
"stalwart",
|
||||||
];
|
];
|
||||||
|
|
||||||
const SMTP_URI: &str = "smtp://postfix.lasuite.svc.cluster.local:25/?skip_ssl_verify=true";
|
const SMTP_URI: &str = "smtp://postfix.lasuite.svc.cluster.local:25/?skip_ssl_verify=true";
|
||||||
@@ -1044,9 +1046,9 @@ mod tests {
|
|||||||
fn test_constants() {
|
fn test_constants() {
|
||||||
assert_eq!(ADMIN_USERNAME, "estudio-admin");
|
assert_eq!(ADMIN_USERNAME, "estudio-admin");
|
||||||
assert_eq!(GITEA_ADMIN_USER, "gitea_admin");
|
assert_eq!(GITEA_ADMIN_USER, "gitea_admin");
|
||||||
assert_eq!(PG_USERS.len(), 13);
|
assert_eq!(PG_USERS.len(), 15);
|
||||||
assert!(PG_USERS.contains(&"kratos"));
|
assert!(PG_USERS.contains(&"kratos"));
|
||||||
assert!(PG_USERS.contains(&"projects"));
|
assert!(PG_USERS.contains(&"stalwart"));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
@@ -1109,6 +1111,8 @@ mod tests {
|
|||||||
"find",
|
"find",
|
||||||
"calendars",
|
"calendars",
|
||||||
"projects",
|
"projects",
|
||||||
|
"penpot",
|
||||||
|
"stalwart",
|
||||||
];
|
];
|
||||||
assert_eq!(PG_USERS, &expected[..]);
|
assert_eq!(PG_USERS, &expected[..]);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -454,6 +454,17 @@ pub async fn seed_openbao() -> Result<Option<SeedResult>> {
|
|||||||
)
|
)
|
||||||
.await?;
|
.await?;
|
||||||
|
|
||||||
|
let stalwart = get_or_create(
|
||||||
|
&bao,
|
||||||
|
"stalwart",
|
||||||
|
&[
|
||||||
|
("admin-password", &rand_token as &dyn Fn() -> String),
|
||||||
|
("dkim-private-key", &empty_fn),
|
||||||
|
],
|
||||||
|
&mut dirty_paths,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
let admin_fn = || "admin".to_string();
|
let admin_fn = || "admin".to_string();
|
||||||
let collabora = get_or_create(
|
let collabora = get_or_create(
|
||||||
&bao,
|
&bao,
|
||||||
@@ -531,6 +542,7 @@ pub async fn seed_openbao() -> Result<Option<SeedResult>> {
|
|||||||
("projects", &projects),
|
("projects", &projects),
|
||||||
("calendars", &calendars),
|
("calendars", &calendars),
|
||||||
("messages", &messages),
|
("messages", &messages),
|
||||||
|
("stalwart", &stalwart),
|
||||||
("collabora", &collabora),
|
("collabora", &collabora),
|
||||||
("tuwunel", &tuwunel),
|
("tuwunel", &tuwunel),
|
||||||
("grafana", &grafana),
|
("grafana", &grafana),
|
||||||
@@ -606,7 +618,7 @@ pub async fn seed_openbao() -> Result<Option<SeedResult>> {
|
|||||||
"auth/kubernetes/role/vso",
|
"auth/kubernetes/role/vso",
|
||||||
&serde_json::json!({
|
&serde_json::json!({
|
||||||
"bound_service_account_names": "default",
|
"bound_service_account_names": "default",
|
||||||
"bound_service_account_namespaces": "ory,devtools,storage,lasuite,matrix,media,data,monitoring",
|
"bound_service_account_namespaces": "ory,devtools,storage,lasuite,stalwart,matrix,media,data,monitoring",
|
||||||
"policies": "vso-reader",
|
"policies": "vso-reader",
|
||||||
"ttl": "1h"
|
"ttl": "1h"
|
||||||
}),
|
}),
|
||||||
|
|||||||
@@ -51,11 +51,8 @@ pub enum SyncStatus {
|
|||||||
// Path helpers
|
// Path helpers
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
/// Base directory for vault keystore files.
|
/// Legacy vault dir — used only for migration.
|
||||||
fn base_dir(override_dir: Option<&Path>) -> PathBuf {
|
fn legacy_vault_dir() -> PathBuf {
|
||||||
if let Some(d) = override_dir {
|
|
||||||
return d.to_path_buf();
|
|
||||||
}
|
|
||||||
dirs::data_dir()
|
dirs::data_dir()
|
||||||
.unwrap_or_else(|| {
|
.unwrap_or_else(|| {
|
||||||
dirs::home_dir()
|
dirs::home_dir()
|
||||||
@@ -66,6 +63,41 @@ fn base_dir(override_dir: Option<&Path>) -> PathBuf {
|
|||||||
.join("vault")
|
.join("vault")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Base directory for vault keystore files: ~/.sunbeam/vault/
|
||||||
|
fn base_dir(override_dir: Option<&Path>) -> PathBuf {
|
||||||
|
if let Some(d) = override_dir {
|
||||||
|
return d.to_path_buf();
|
||||||
|
}
|
||||||
|
let new_dir = dirs::home_dir()
|
||||||
|
.unwrap_or_else(|| PathBuf::from("."))
|
||||||
|
.join(".sunbeam")
|
||||||
|
.join("vault");
|
||||||
|
|
||||||
|
// Migration: copy files from legacy location if new dir doesn't exist yet
|
||||||
|
if !new_dir.exists() {
|
||||||
|
let legacy = legacy_vault_dir();
|
||||||
|
if legacy.is_dir() {
|
||||||
|
let _ = std::fs::create_dir_all(&new_dir);
|
||||||
|
if let Ok(entries) = std::fs::read_dir(&legacy) {
|
||||||
|
for entry in entries.flatten() {
|
||||||
|
let dest = new_dir.join(entry.file_name());
|
||||||
|
let _ = std::fs::copy(entry.path(), &dest);
|
||||||
|
#[cfg(unix)]
|
||||||
|
{
|
||||||
|
use std::os::unix::fs::PermissionsExt;
|
||||||
|
let _ = std::fs::set_permissions(
|
||||||
|
&dest,
|
||||||
|
std::fs::Permissions::from_mode(0o600),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
new_dir
|
||||||
|
}
|
||||||
|
|
||||||
/// Path to the encrypted keystore file for a domain.
|
/// Path to the encrypted keystore file for a domain.
|
||||||
pub fn keystore_path(domain: &str) -> PathBuf {
|
pub fn keystore_path(domain: &str) -> PathBuf {
|
||||||
keystore_path_in(domain, None)
|
keystore_path_in(domain, None)
|
||||||
@@ -83,6 +115,11 @@ pub fn keystore_exists(domain: &str) -> bool {
|
|||||||
keystore_path(domain).exists()
|
keystore_path(domain).exists()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Whether a keystore exists in a specific directory (context-aware).
|
||||||
|
pub fn keystore_exists_at(domain: &str, dir: &Path) -> bool {
|
||||||
|
keystore_path_in(domain, Some(dir)).exists()
|
||||||
|
}
|
||||||
|
|
||||||
fn keystore_exists_in(domain: &str, dir: Option<&Path>) -> bool {
|
fn keystore_exists_in(domain: &str, dir: Option<&Path>) -> bool {
|
||||||
keystore_path_in(domain, dir).exists()
|
keystore_path_in(domain, dir).exists()
|
||||||
}
|
}
|
||||||
@@ -92,7 +129,13 @@ fn keystore_exists_in(domain: &str, dir: Option<&Path>) -> bool {
|
|||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
fn machine_salt_path(override_dir: Option<&Path>) -> PathBuf {
|
fn machine_salt_path(override_dir: Option<&Path>) -> PathBuf {
|
||||||
base_dir(override_dir).join(".machine-salt")
|
if let Some(d) = override_dir {
|
||||||
|
return d.join(".machine-salt");
|
||||||
|
}
|
||||||
|
dirs::home_dir()
|
||||||
|
.unwrap_or_else(|| PathBuf::from("."))
|
||||||
|
.join(".sunbeam")
|
||||||
|
.join(".machine-salt")
|
||||||
}
|
}
|
||||||
|
|
||||||
fn load_or_create_machine_salt(override_dir: Option<&Path>) -> Result<Vec<u8>> {
|
fn load_or_create_machine_salt(override_dir: Option<&Path>) -> Result<Vec<u8>> {
|
||||||
@@ -203,11 +246,16 @@ fn decrypt(data: &[u8], domain: &str, override_dir: Option<&Path>) -> Result<Vec
|
|||||||
// Public API
|
// Public API
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
/// Save a keystore, encrypted, to the local filesystem.
|
/// Save a keystore, encrypted, to the local filesystem (default dir).
|
||||||
pub fn save_keystore(ks: &VaultKeystore) -> Result<()> {
|
pub fn save_keystore(ks: &VaultKeystore) -> Result<()> {
|
||||||
save_keystore_in(ks, None)
|
save_keystore_in(ks, None)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Save a keystore to a specific directory (context-aware).
|
||||||
|
pub fn save_keystore_to(ks: &VaultKeystore, dir: &Path) -> Result<()> {
|
||||||
|
save_keystore_in(ks, Some(dir))
|
||||||
|
}
|
||||||
|
|
||||||
fn save_keystore_in(ks: &VaultKeystore, override_dir: Option<&Path>) -> Result<()> {
|
fn save_keystore_in(ks: &VaultKeystore, override_dir: Option<&Path>) -> Result<()> {
|
||||||
let path = keystore_path_in(&ks.domain, override_dir);
|
let path = keystore_path_in(&ks.domain, override_dir);
|
||||||
|
|
||||||
@@ -235,11 +283,16 @@ fn save_keystore_in(ks: &VaultKeystore, override_dir: Option<&Path>) -> Result<(
|
|||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Load and decrypt a keystore from the local filesystem.
|
/// Load and decrypt a keystore from the local filesystem (default dir).
|
||||||
pub fn load_keystore(domain: &str) -> Result<VaultKeystore> {
|
pub fn load_keystore(domain: &str) -> Result<VaultKeystore> {
|
||||||
load_keystore_in(domain, None)
|
load_keystore_in(domain, None)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Load a keystore from a specific directory (context-aware).
|
||||||
|
pub fn load_keystore_from(domain: &str, dir: &Path) -> Result<VaultKeystore> {
|
||||||
|
load_keystore_in(domain, Some(dir))
|
||||||
|
}
|
||||||
|
|
||||||
fn load_keystore_in(domain: &str, override_dir: Option<&Path>) -> Result<VaultKeystore> {
|
fn load_keystore_in(domain: &str, override_dir: Option<&Path>) -> Result<VaultKeystore> {
|
||||||
let path = keystore_path_in(domain, override_dir);
|
let path = keystore_path_in(domain, override_dir);
|
||||||
if !path.exists() {
|
if !path.exists() {
|
||||||
@@ -275,6 +328,11 @@ pub fn verify_vault_keys(domain: &str) -> Result<VaultKeystore> {
|
|||||||
verify_vault_keys_in(domain, None)
|
verify_vault_keys_in(domain, None)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Verify vault keys from a specific directory (context-aware).
|
||||||
|
pub fn verify_vault_keys_from(domain: &str, dir: &Path) -> Result<VaultKeystore> {
|
||||||
|
verify_vault_keys_in(domain, Some(dir))
|
||||||
|
}
|
||||||
|
|
||||||
fn verify_vault_keys_in(domain: &str, override_dir: Option<&Path>) -> Result<VaultKeystore> {
|
fn verify_vault_keys_in(domain: &str, override_dir: Option<&Path>) -> Result<VaultKeystore> {
|
||||||
let ks = load_keystore_in(domain, override_dir)?;
|
let ks = load_keystore_in(domain, override_dir)?;
|
||||||
|
|
||||||
|
|||||||
Reference in New Issue
Block a user